[ 516.246809] env[62405]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62405) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 516.247154] env[62405]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62405) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 516.247281] env[62405]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62405) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 516.247616] env[62405]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 516.348504] env[62405]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62405) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 516.358471] env[62405]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62405) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 516.958265] env[62405]: INFO nova.virt.driver [None req-d026161d-9d36-4891-8215-6808e05ab605 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 517.028740] env[62405]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 517.028899] env[62405]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 517.029008] env[62405]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62405) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 520.272221] env[62405]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-2e9a024e-c651-414e-96b8-d2e3322b9ee7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.288581] env[62405]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62405) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 520.288715] env[62405]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-7f13c580-4e60-4cf1-9ba0-46e57096669b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.329571] env[62405]: INFO oslo_vmware.api [-] Successfully established new session; session ID is c0b2a.
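The entries above show the VMware driver opening its vCenter session through oslo.vmware. A minimal sketch of that call, assuming oslo.vmware's VMwareAPISession keyword arguments; the credentials and retry values are placeholders, not values taken from this log:

    # Hedged sketch: roughly what nova's vmwareapi driver does at startup.
    # Constructing the session takes "oslo_vmware_api_lock" and performs the
    # SessionManager.Login call seen in the entries above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        host='vc1.osci.c.eu-de-1.cloud.sap',  # vCenter endpoint from the log
        port=443,
        server_username='REDACTED',           # placeholder credential
        server_password='REDACTED',           # placeholder credential
        api_retry_count=10,                   # assumed value
        task_poll_interval=0.5,               # assumed value
    )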
[ 520.329715] env[62405]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.301s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.330344] env[62405]: INFO nova.virt.vmwareapi.driver [None req-d026161d-9d36-4891-8215-6808e05ab605 None None] VMware vCenter version: 7.0.3
[ 520.334008] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4491eade-7657-4795-a40e-83f81b86f779 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.354985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5d793b-e8c4-4dac-9ac6-d3349be729db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.360996] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e116e7fe-5fa8-4f51-929f-63bc5f4b1e25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.367612] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7251261a-8907-4e7f-98fe-cf856e9f18b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.380545] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc42bf2-0869-4910-b302-d4109feb7d06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.386514] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86663e58-2116-41d2-83bb-db815b66d115 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.417182] env[62405]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-373a3729-11d2-412c-9408-8394e54a80a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 520.422518] env[62405]: DEBUG nova.virt.vmwareapi.driver [None req-d026161d-9d36-4891-8215-6808e05ab605 None None] Extension org.openstack.compute already exists. {{(pid=62405) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 520.425292] env[62405]: INFO nova.compute.provider_config [None req-d026161d-9d36-4891-8215-6808e05ab605 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
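A few entries further down, the service dumps its entire configuration ("Full set of CONF"). That block comes from oslo.config's log_opt_values, referenced in every one of those entries; a minimal sketch, assuming the standard ConfigOpts entry points and using the config-file paths shown in the dump:

    # Hedged sketch: how a service produces the "Full set of CONF" dump below.
    # CONF.log_opt_values() emits one DEBUG line per registered option.
    import logging
    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF
    CONF(['--config-file', '/etc/nova/nova.conf',
          '--config-file', '/etc/nova/nova-cpu-common.conf',
          '--config-file', '/etc/nova/nova-cpu-1.conf'],
         project='nova')
    CONF.log_opt_values(LOG, logging.DEBUG)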
[ 520.928593] env[62405]: DEBUG nova.context [None req-d026161d-9d36-4891-8215-6808e05ab605 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),86d04c62-6866-42bd-b521-725704ea87ec(cell1) {{(pid=62405) load_cells /opt/stack/nova/nova/context.py:464}}
[ 520.930575] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 520.930809] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 520.931554] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.931993] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Acquiring lock "86d04c62-6866-42bd-b521-725704ea87ec" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 520.932202] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Lock "86d04c62-6866-42bd-b521-725704ea87ec" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 520.933277] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Lock "86d04c62-6866-42bd-b521-725704ea87ec" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 520.953397] env[62405]: INFO dbcounter [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Registered counter for database nova_cell0
[ 520.962246] env[62405]: INFO dbcounter [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Registered counter for database nova_cell1
[ 520.965466] env[62405]: DEBUG oslo_db.sqlalchemy.engines [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62405) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 520.966093] env[62405]: DEBUG oslo_db.sqlalchemy.engines [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62405) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 520.970678] env[62405]: ERROR nova.db.main.api [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 520.970678] env[62405]: result = function(*args, **kwargs)
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 520.970678] env[62405]: return func(*args, **kwargs)
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 520.970678] env[62405]: result = fn(*args, **kwargs)
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 520.970678] env[62405]: return f(*args, **kwargs)
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 520.970678] env[62405]: return db.service_get_minimum_version(context, binaries)
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 520.970678] env[62405]: _check_db_access()
[ 520.970678] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 520.970678] env[62405]: stacktrace = ''.join(traceback.format_stack())
[ 520.970678] env[62405]:
[ 520.971493] env[62405]: ERROR nova.db.main.api [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 520.971493] env[62405]: result = function(*args, **kwargs)
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 520.971493] env[62405]: return func(*args, **kwargs)
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 520.971493] env[62405]: result = fn(*args, **kwargs)
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 520.971493] env[62405]: return f(*args, **kwargs)
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 520.971493] env[62405]: return db.service_get_minimum_version(context, binaries)
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 520.971493] env[62405]: _check_db_access()
[ 520.971493] env[62405]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 520.971493] env[62405]: stacktrace = ''.join(traceback.format_stack())
[ 520.971493] env[62405]:
[ 520.971892] env[62405]: WARNING nova.objects.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Failed to get minimum service version for cell 86d04c62-6866-42bd-b521-725704ea87ec
[ 520.971998] env[62405]: WARNING nova.objects.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 520.972443] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Acquiring lock "singleton_lock" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 520.972605] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Acquired lock "singleton_lock" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[
520.972877] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Releasing lock "singleton_lock" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.973205] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Full set of CONF: {{(pid=62405) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 520.973352] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ******************************************************************************** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 520.973480] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] Configuration options gathered from: {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 520.973619] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 520.973825] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 520.973954] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ================================================================================ {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 520.974180] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] allow_resize_to_same_host = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.974354] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] arq_binding_timeout = 300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.974487] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] backdoor_port = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.974614] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] backdoor_socket = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.974778] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] block_device_allocate_retries = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.974939] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] block_device_allocate_retries_interval = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975130] env[62405]: DEBUG 
oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cert = self.pem {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975297] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975465] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute_monitors = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975630] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] config_dir = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975799] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] config_drive_format = iso9660 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.975935] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976111] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] config_source = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976279] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] console_host = devstack {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976443] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] control_exchange = nova {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976600] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cpu_allocation_ratio = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976759] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] daemon = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.976933] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] debug = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977096] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_access_ip_network_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977262] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_availability_zone = nova {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977417] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_ephemeral_format = 
None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977573] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_green_pool_size = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977811] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.977977] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] default_schedule_zone = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978147] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] disk_allocation_ratio = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978309] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] enable_new_services = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978484] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] enabled_apis = ['osapi_compute'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978646] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] enabled_ssl_apis = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978805] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] flat_injected = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.978966] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] force_config_drive = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.979137] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] force_raw_images = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.979309] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] graceful_shutdown_timeout = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.979471] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] heal_instance_info_cache_interval = 60 {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.979686] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] host = cpu-1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.979857] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980031] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] initial_disk_allocation_ratio = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980197] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] initial_ram_allocation_ratio = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980417] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980613] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_build_timeout = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980787] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_delete_interval = 300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.980958] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_format = [instance: %(uuid)s] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981143] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_name_template = instance-%08x {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981307] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_usage_audit = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981475] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_usage_audit_period = month {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981637] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981800] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] instances_path = /opt/stack/data/nova/instances {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.981966] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] internal_service_availability_zone = internal {{(pid=62405) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982135] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] key = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982293] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] live_migration_retry_count = 30 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982460] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_color = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982623] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_config_append = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982817] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.982979] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_dir = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983151] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983280] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_options = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983438] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_rotate_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983611] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_rotate_interval_type = days {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983773] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] log_rotation_type = none {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.983902] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984035] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984210] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984376] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984504] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984663] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] long_rpc_timeout = 1800 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984823] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_concurrent_builds = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.984978] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_concurrent_live_migrations = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985149] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_concurrent_snapshots = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985307] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_local_block_devices = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985461] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_logfile_count = 30 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985613] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] max_logfile_size_mb = 200 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985765] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] maximum_instance_delete_attempts = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.985930] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metadata_listen = 0.0.0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986108] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metadata_listen_port = 8775 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986277] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metadata_workers = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986435] env[62405]: DEBUG oslo_service.service 
[None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] migrate_max_retries = -1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986597] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] mkisofs_cmd = genisoimage {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986800] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] my_block_storage_ip = 10.180.1.21 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.986931] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] my_ip = 10.180.1.21 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987143] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987306] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] network_allocate_retries = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987482] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987646] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] osapi_compute_listen = 0.0.0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987809] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] osapi_compute_listen_port = 8774 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.987975] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] osapi_compute_unique_server_name_scope = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988158] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] osapi_compute_workers = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988317] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] password_length = 12 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988474] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] periodic_enable = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988629] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] periodic_fuzzy_delay = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988792] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] pointer_model = usbtablet 
{{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.988957] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] preallocate_images = none {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989128] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] publish_errors = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989257] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] pybasedir = /opt/stack/nova {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989412] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ram_allocation_ratio = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989569] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rate_limit_burst = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989734] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rate_limit_except_level = CRITICAL {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.989893] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rate_limit_interval = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990063] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reboot_timeout = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990229] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reclaim_instance_interval = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990379] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] record = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990546] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reimage_timeout_per_gb = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990738] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] report_interval = 120 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.990904] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rescue_timeout = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991076] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reserved_host_cpus = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991237] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reserved_host_disk_mb = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991393] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reserved_host_memory_mb = 512 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991550] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] reserved_huge_pages = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991705] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] resize_confirm_window = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.991863] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] resize_fs_using_block_device = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992030] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] resume_guests_state_on_host_boot = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992203] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992362] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] rpc_response_timeout = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992520] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] run_external_periodic_tasks = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992715] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] running_deleted_instance_action = reap {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.992864] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] running_deleted_instance_poll_interval = 1800 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993036] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] running_deleted_instance_timeout = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993199] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler_instance_sync_interval = 120 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993366] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_down_time = 720 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993533] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] 
servicegroup_driver = db {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993698] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] shell_completion = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.993857] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] shelved_offload_time = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994031] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] shelved_poll_interval = 3600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994204] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] shutdown_timeout = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994366] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] source_is_ipv6 = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994526] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ssl_only = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994789] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.994964] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] sync_power_state_interval = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995140] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] sync_power_state_pool_size = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995310] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] syslog_log_facility = LOG_USER {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995466] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] tempdir = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995623] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] timeout_nbd = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995788] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] transport_url = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.995950] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] update_resources_interval = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996122] env[62405]: DEBUG 
oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_cow_images = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996281] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_eventlog = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996435] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_journal = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996591] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_json = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996748] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_rootwrap_daemon = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.996913] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_stderr = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997076] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] use_syslog = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997235] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vcpu_pin_set = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997403] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plugging_is_fatal = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997568] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plugging_timeout = 300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997732] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] virt_mkfs = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.997896] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] volume_usage_poll_interval = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.998067] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] watch_log_file = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.998241] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] web = /usr/share/spice-html5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 520.998424] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.998589] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.998752] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.998924] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_concurrency.disable_process_locking = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.999519] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.999712] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 520.999891] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.000081] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.000259] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.000428] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.000638] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.auth_strategy = keystone {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.000818] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.compute_link_prefix = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.001007] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.001193] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.dhcp_domain = novalocal {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
521.001365] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.enable_instance_password = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.001531] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.glance_link_prefix = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.001700] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.001874] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002056] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.instance_list_per_project_cells = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002228] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.list_records_by_skipping_down_cells = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002394] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.local_metadata_per_cell = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002563] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.max_limit = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002761] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.metadata_cache_expiration = 15 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.002952] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.neutron_default_tenant_id = default {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.003146] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.response_validation = warn {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.003316] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.use_neutron_default_nets = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.003489] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.003660] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.003850] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004040] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004218] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_dynamic_targets = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004384] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_jsonfile_path = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004566] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004764] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.backend = dogpile.cache.memcached {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.004935] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.backend_argument = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005112] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.backend_expiration_time = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005284] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.config_prefix = cache.oslo {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005452] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.dead_timeout = 60.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005615] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.debug_cache_backend = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005777] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.enable_retry_client = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.005941] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.enable_socket_keepalive = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006124] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.enabled = True {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006291] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.enforce_fips_mode = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006455] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.expiration_time = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006616] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.hashclient_retry_attempts = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006779] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.hashclient_retry_delay = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.006942] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_dead_retry = 300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007117] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_password = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007284] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007445] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007606] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_pool_maxsize = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007766] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.007933] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_sasl_enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008127] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008295] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_socket_timeout = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008457] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.memcache_username = None {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008620] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.proxies = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008783] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_db = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.008943] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_password = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009134] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_sentinel_service_name = mymaster {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009311] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009480] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_server = localhost:6379 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009641] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_socket_timeout = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009801] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.redis_username = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.009965] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.retry_attempts = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010143] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.retry_delay = 0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010308] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.socket_keepalive_count = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010471] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.socket_keepalive_idle = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010658] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.socket_keepalive_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010833] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.tls_allowed_ciphers = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.010994] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.tls_cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.011170] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.tls_certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.011334] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.tls_enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.011495] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cache.tls_keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.011667] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.011844] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.auth_type = password {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012025] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012204] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.catalog_info = volumev3::publicURL {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012367] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012532] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012733] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.cross_az_attach = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.012897] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.debug = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013068] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.endpoint_template = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013236] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.http_retries = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013396] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.insecure = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013553] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013724] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.os_region_name = RegionOne {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.013887] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014053] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cinder.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014228] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014388] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.cpu_dedicated_set = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014545] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.cpu_shared_set = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014708] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.image_type_exclude_list = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.014875] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015046] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.max_concurrent_disk_ops = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015213] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.max_disk_devices_to_attach = -1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015374] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015544] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015707] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.resource_provider_association_refresh = 
300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.015867] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016039] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.shutdown_retry_interval = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016222] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016401] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] conductor.workers = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016575] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] console.allowed_origins = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016763] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] console.ssl_ciphers = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.016917] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] console.ssl_minimum_version = default {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017100] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] consoleauth.enforce_session_timeout = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017274] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] consoleauth.token_ttl = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017435] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017591] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017757] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.017918] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018088] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.connect_retry_delay = None {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018249] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018413] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018569] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018731] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.018895] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019069] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019228] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019387] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019554] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.service_type = accelerator {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019718] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.019880] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020047] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020210] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020385] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020565] env[62405]: 
DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] cyborg.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020789] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.asyncio_connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.020965] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.asyncio_slave_connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021155] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.backend = sqlalchemy {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021329] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021497] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.connection_debug = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021668] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.connection_parameters = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021832] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.connection_recycle_time = 3600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.021994] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.connection_trace = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.022173] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.db_inc_retry_interval = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.022337] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.db_max_retries = 20 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.022499] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.db_max_retry_interval = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.022684] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.db_retry_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.022852] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.max_overflow = 50 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023028] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a 
None None] database.max_pool_size = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023199] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.max_retries = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023370] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023533] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.mysql_wsrep_sync_wait = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023691] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.pool_timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.023877] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.retry_interval = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024056] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.slave_connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024227] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.sqlite_synchronous = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024392] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] database.use_db_reconnect = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024560] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.asyncio_connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024719] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.asyncio_slave_connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.024890] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.backend = sqlalchemy {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025070] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.connection = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025237] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.connection_debug = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025406] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.connection_parameters = 
{{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025569] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.connection_recycle_time = 3600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025731] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.connection_trace = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.025894] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.db_inc_retry_interval = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026076] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.db_max_retries = 20 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026242] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.db_max_retry_interval = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026403] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.db_retry_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026565] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.max_overflow = 50 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026725] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.max_pool_size = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.026887] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.max_retries = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027064] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027228] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027387] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.pool_timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027546] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.retry_interval = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027703] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.slave_connection = **** {{(pid=62405) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.027868] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] api_database.sqlite_synchronous = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028051] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] devices.enabled_mdev_types = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028232] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028403] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ephemeral_storage_encryption.default_format = luks {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028565] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ephemeral_storage_encryption.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028726] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.028898] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.api_servers = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029073] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029240] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029405] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029564] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029720] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.029882] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.debug = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030058] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.default_trusted_certificate_ids = [] {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030224] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.enable_certificate_validation = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030385] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.enable_rbd_download = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030546] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030721] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.030885] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031055] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031219] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031382] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.num_retries = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031552] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.rbd_ceph_conf = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031713] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.rbd_connect_timeout = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.031884] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.rbd_pool = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032064] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.rbd_user = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032226] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032384] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032541] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032742] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.service_type = image {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.032914] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033087] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033244] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033399] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033573] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033734] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.verify_glance_signatures = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.033922] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] glance.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.034108] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] guestfs.debug = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.034280] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.034443] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.auth_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.034603] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.034832] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035035] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.collect_timing = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035204] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035366] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035524] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035688] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.035846] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036021] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036181] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036340] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036501] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036661] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036835] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.service_type = shared-file-system {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.036999] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.share_apply_policy_timeout = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.037179] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.037340] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.037497] env[62405]: 
DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.037656] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.037891] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.038163] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] manila.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.038364] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] mks.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.038719] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.038914] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.manager_interval = 2400 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039100] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.precache_concurrency = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039278] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.remove_unused_base_images = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039450] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039621] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039799] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] image_cache.subdirectory_name = _base {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.039977] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.api_max_retries = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.040160] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.api_retry_interval = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.040323] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.040486] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.auth_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.040670] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.040840] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041012] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041190] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.conductor_group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041352] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041511] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041672] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041839] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.041997] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.042172] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.042333] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.042498] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.peer_list = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.042690] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.region_name = None {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.042854] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043030] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.serial_console_state_timeout = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043196] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043367] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.service_type = baremetal {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043528] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.shard = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043695] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.043857] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044029] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044197] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044377] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044538] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ironic.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044719] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.044895] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] key_manager.fixed_key = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045088] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045257] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.barbican_api_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045417] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.barbican_endpoint = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045589] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.barbican_endpoint_type = public {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045749] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.barbican_region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.045910] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046079] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046248] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046411] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046568] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046730] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.number_of_retries = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.046939] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.retry_delay = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047148] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.send_service_user_token = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047320] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047481] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047644] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.verify_ssl = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047804] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican.verify_ssl_path = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.047972] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048151] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.auth_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048311] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048467] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048629] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048791] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.048953] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049128] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049289] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] barbican_service_user.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049454] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.approle_role_id = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049613] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.approle_secret_id = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049784] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.kv_mountpoint = secret {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.049946] env[62405]: DEBUG 
oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.kv_path = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050124] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.kv_version = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050288] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.namespace = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050447] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.root_token_id = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050627] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.ssl_ca_crt_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050807] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.timeout = 60.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.050975] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.use_ssl = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051161] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051332] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051492] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051656] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051815] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.051974] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052148] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052312] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.insecure = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052472] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052629] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052818] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.052989] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053168] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053330] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053500] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.service_type = identity {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053667] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053826] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.053981] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.054152] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.054330] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.054489] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] keystone.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.054681] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.ceph_mount_options = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
521.054981] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.055178] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.connection_uri = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.055344] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_mode = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.055512] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_model_extra_flags = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.055680] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_models = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.055854] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_power_governor_high = performance {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056034] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_power_governor_low = powersave {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056206] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_power_management = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056383] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056551] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.device_detach_attempts = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056716] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.device_detach_timeout = 20 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.056884] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.disk_cachemodes = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057056] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.disk_prefix = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057226] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.enabled_perf_events = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057387] env[62405]: 
DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.file_backed_memory = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057550] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.gid_maps = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057709] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.hw_disk_discard = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.057865] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.hw_machine_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058046] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_rbd_ceph_conf = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058213] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058376] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058546] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_rbd_glance_store_name = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058715] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_rbd_pool = rbd {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.058903] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_type = default {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059093] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.images_volume_group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059262] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.inject_key = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059432] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.inject_partition = -2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059591] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.inject_password = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059753] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.iscsi_iface = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.059919] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.iser_use_multipath = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060090] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_bandwidth = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060255] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060417] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_downtime = 500 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060593] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060773] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.060934] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_inbound_addr = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061110] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061273] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_permit_post_copy = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061432] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_scheme = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061601] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_timeout_action = abort {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061763] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_tunnelled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.061922] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_uri = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.062100] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.live_migration_with_native_tls = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.062262] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.max_queues = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.062424] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.062697] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.062863] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.nfs_mount_options = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.063170] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.063347] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.063515] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_iser_scan_tries = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.063694] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_memory_encrypted_guests = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.063877] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.064052] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_pcie_ports = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.064224] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.num_volume_scan_tries = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.064388] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.pmem_namespaces = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.064548] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.quobyte_client_cfg = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.064844] env[62405]: 
DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065029] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rbd_connect_timeout = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065196] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065358] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065517] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rbd_secret_uuid = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065678] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rbd_user = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.065844] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066026] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.remote_filesystem_transport = ssh {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066189] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rescue_image_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066348] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rescue_kernel_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066504] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rescue_ramdisk_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066670] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.066872] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.rx_queue_size = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.067068] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.smbfs_mount_options = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.067367] env[62405]: DEBUG 
oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.067544] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.snapshot_compression = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.067717] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.snapshot_image_format = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.067997] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.068191] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.sparse_logical_volumes = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.068356] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.swtpm_enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.068525] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.swtpm_group = tss {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.068694] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.swtpm_user = tss {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.068867] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.sysinfo_serial = unique {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069051] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.tb_cache_size = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069218] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.tx_queue_size = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069381] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.uid_maps = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069543] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.use_virtio_for_bridges = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069714] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.virt_type = kvm {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.069894] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.volume_clear = zero {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.070227] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.volume_clear_size = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.070435] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.volume_use_multipath = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.070621] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_cache_path = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.070806] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.070980] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_mount_group = qemu {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.071164] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_mount_opts = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.071331] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.071619] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.071808] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.vzstorage_mount_user = stack {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.071984] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.072173] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.072354] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.auth_type = password {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.072515] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.072698] env[62405]: DEBUG oslo_service.service 
[None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.072878] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073051] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073215] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073385] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.default_floating_pool = public {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073542] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073705] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.extension_sync_interval = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.073867] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.http_retries = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074038] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074207] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074369] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074540] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074701] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.074875] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.ovs_bridge = br-int {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075047] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.physnets = [] {{(pid=62405) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075222] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.region_name = RegionOne {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075380] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075547] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.service_metadata_proxy = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075708] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.075876] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.service_type = network {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076049] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076211] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076369] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076526] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076706] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.076867] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] neutron.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077048] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.bdms_in_notifications = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077229] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.default_level = INFO {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077396] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.include_share_mapping = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077570] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.notification_format = unversioned {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077733] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.notify_on_state_change = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.077910] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078097] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] pci.alias = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078271] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] pci.device_spec = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078434] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] pci.report_in_placement = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078603] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078773] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.auth_type = password {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.078941] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079112] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079270] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079431] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079590] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079748] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.connect_retry_delay = None {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.079910] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.default_domain_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080079] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.default_domain_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080240] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.domain_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080398] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.domain_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080564] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080746] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.080909] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081136] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081239] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081408] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.password = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081567] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.project_domain_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081735] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.project_domain_name = Default {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.081903] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.project_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082088] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.project_name = service {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082264] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.region_name = RegionOne {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082425] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082583] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082807] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.service_type = placement {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.082978] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083155] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083316] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083475] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.system_scope = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083664] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083795] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.trust_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.083949] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.user_domain_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084130] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.user_domain_name = Default {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084289] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.user_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084459] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.username = nova {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084637] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084797] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] placement.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.084977] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.cores = 20 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.085180] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.count_usage_from_placement = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.085356] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.085528] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.injected_file_content_bytes = 10240 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.085696] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.injected_file_path_length = 255 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.085865] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.injected_files = 5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086040] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.instances = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086210] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.key_pairs = 100 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086377] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.metadata_items = 128 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086542] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.ram = 51200 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086705] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.recheck_quota = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.086873] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.server_group_members = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087048] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.server_groups = 10 {{(pid=62405) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087261] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087439] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] quota.unified_limits_resource_strategy = require {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087612] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087776] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.087943] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.image_metadata_prefilter = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088119] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088287] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.max_attempts = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088450] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.max_placement_results = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088613] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088776] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.query_placement_for_image_type_support = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.088937] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089124] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] scheduler.workers = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089300] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089477] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089661] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089834] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.089996] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.090175] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.090337] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.090521] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.090715] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.host_subset_size = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.090888] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091058] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091226] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091389] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.isolated_hosts = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091567] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.isolated_images = [] 
{{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091732] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.091894] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092064] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092229] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.pci_in_placement = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092392] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092552] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092746] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.092915] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093097] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093264] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093425] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.track_instance_changes = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093614] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093775] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metrics.required = True {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.093940] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metrics.weight_multiplier = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.094115] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.094282] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] metrics.weight_setting = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.094590] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.094767] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.094945] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.port_range = 10000:20000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095131] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095307] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095477] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] serial_console.serialproxy_port = 6083 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095644] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095817] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.auth_type = password {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.095980] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096155] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096320] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.collect_timing = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096479] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096635] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096803] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.send_service_user_token = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.096967] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.097137] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] service_user.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.097310] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.agent_enabled = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.097471] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.097782] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.097984] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098167] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.html5proxy_port = 6082 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098332] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.image_compression = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098494] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.jpeg_compression = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098654] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.playback_compression = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098816] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.require_secure = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.098987] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.server_listen = 127.0.0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099169] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099330] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.streaming_mode = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099488] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] spice.zlib_compression = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099651] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] upgrade_levels.baseapi = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099821] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] upgrade_levels.compute = auto {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.099981] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] upgrade_levels.conductor = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.100155] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] upgrade_levels.scheduler = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.100322] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.100484] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.auth_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.100675] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.100839] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101015] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101182] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.insecure = False {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101344] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101506] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101666] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vendordata_dynamic_auth.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.101842] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.api_retry_count = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102011] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.ca_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102190] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.cache_prefix = devstack-image-cache {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102359] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.cluster_name = testcl1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102524] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.connection_pool_size = 10 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102705] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.console_delay_seconds = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.102888] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.datastore_regex = ^datastore.* {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103110] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103287] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.host_password = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103459] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.host_port = 443 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103629] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.host_username = administrator@vsphere.local {{(pid=62405) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103800] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.insecure = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.103961] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.integration_bridge = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104135] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.maximum_objects = 100 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104293] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.pbm_default_policy = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104450] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.pbm_enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104608] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.pbm_wsdl_location = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104778] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.104937] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.serial_port_proxy_uri = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105106] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.serial_port_service_uri = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105274] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.task_poll_interval = 0.5 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105445] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.use_linked_clone = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105610] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.vnc_keymap = en-us {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105776] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.vnc_port = 5900 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.105940] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vmware.vnc_port_total = 10000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.106136] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.auth_schemes = ['none'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.106313] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.106603] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.106791] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.106965] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.novncproxy_port = 6080 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107166] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.server_listen = 127.0.0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107347] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107509] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.vencrypt_ca_certs = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107666] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.vencrypt_client_cert = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107825] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vnc.vencrypt_client_key = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.107996] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.108173] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_deep_image_inspection = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.108335] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.108494] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
521.108651] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.108810] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.disable_rootwrap = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.108969] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.enable_numa_live_migration = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109141] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109300] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109456] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109614] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.libvirt_disable_apic = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109769] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.109930] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110100] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110261] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110420] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110591] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110777] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.110941] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111114] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111281] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111463] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111633] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.client_socket_timeout = 900 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111801] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.default_pool_size = 1000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.111969] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.keep_alive = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.112150] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.max_header_line = 16384 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.112312] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.secure_proxy_ssl_header = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.112472] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.ssl_ca_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.112633] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.ssl_cert_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.112850] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.ssl_key_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.113044] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] wsgi.tcp_keepidle = 600 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.113222] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.113388] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] zvm.ca_file = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.113546] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] zvm.cloud_connector_url = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.113847] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114039] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] zvm.reachable_timeout = 300 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114217] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114395] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114569] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.connection_string = messaging:// {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114733] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.enabled = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.114903] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.es_doc_type = notification {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115105] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.es_scroll_size = 10000 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115286] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.es_scroll_time = 2m {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115448] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.filter_error_trace = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115615] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.hmac_keys = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115779] env[62405]: DEBUG 
oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.sentinel_service_name = mymaster {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.115940] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.socket_timeout = 0.1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116112] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.trace_requests = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116273] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler.trace_sqlalchemy = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116450] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler_jaeger.process_tags = {} {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116607] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler_jaeger.service_name_prefix = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116768] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] profiler_otlp.service_name_prefix = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.116933] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] remote_debug.host = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117097] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] remote_debug.port = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117277] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117440] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117601] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117763] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.117925] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118096] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118301] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118430] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118592] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118761] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.118924] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119106] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119275] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119444] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119613] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119780] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.119944] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120132] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120297] env[62405]: DEBUG oslo_service.service [None 
req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120459] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120646] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120830] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.120991] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121169] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121328] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121486] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121643] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121800] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.121967] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.122146] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.122324] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.122489] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.122649] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.122850] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123036] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.ssl_version = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123202] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123389] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123553] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_notifications.retry = -1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123734] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.123910] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_messaging_notifications.transport_url = **** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124093] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.auth_section = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124259] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.auth_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124414] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.cafile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124569] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.certfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124728] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.collect_timing = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.124885] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] 
oslo_limit.connect_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125070] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.connect_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125244] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_id = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125415] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_interface = publicURL {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125572] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_override = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125728] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.125885] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_service_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126050] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.endpoint_service_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126215] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.insecure = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126372] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.keyfile = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126530] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.max_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126685] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.min_version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126843] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.region_name = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.126999] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.retriable_status_codes = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127171] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.service_name = None {{(pid=62405) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127327] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.service_type = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127485] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.split_loggers = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127641] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.status_code_retries = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127798] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.status_code_retry_delay = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.127955] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.timeout = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128126] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.valid_interfaces = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128285] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_limit.version = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128448] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_reports.file_event_handler = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128611] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128767] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] oslo_reports.log_dir = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.128938] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129111] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129272] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129435] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129597] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129756] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.129929] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130097] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130257] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130422] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130601] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130786] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] vif_plug_ovs_privileged.user = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.130953] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.flat_interface = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131147] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131320] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131491] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131659] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131826] 
env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.131990] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.132164] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.132342] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.132512] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.isolate_vif = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.132707] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.132875] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133056] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133228] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.ovsdb_interface = native {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133387] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] os_vif_ovs.per_port_bridge = False {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133579] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.capabilities = [21] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133725] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.133877] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.helper_command = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.134055] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
521.134223] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.134378] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] privsep_osbrick.user = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.134550] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.134706] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.group = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.134864] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.helper_command = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.135037] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.135202] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.135358] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] nova_sys_admin.user = None {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 521.135489] env[62405]: DEBUG oslo_service.service [None req-9ae97520-980c-4ec4-8d8a-5e61120ac53a None None] ******************************************************************************** {{(pid=62405) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 521.135915] env[62405]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 521.639900] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Getting list of instances from cluster (obj){ [ 521.639900] env[62405]: value = "domain-c8" [ 521.639900] env[62405]: _type = "ClusterComputeResource" [ 521.639900] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 521.641178] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79916e0-fa51-4a5f-9bd5-0d8d1fc89e57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.650697] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Got total of 0 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 521.651313] env[62405]: WARNING nova.virt.vmwareapi.driver [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 521.651830] env[62405]: INFO nova.virt.node [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Generated node identity 7d5eded7-a501-4fa6-b1d3-60e273d555d7 [ 521.652142] env[62405]: INFO nova.virt.node [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Wrote node identity 7d5eded7-a501-4fa6-b1d3-60e273d555d7 to /opt/stack/data/n-cpu-1/compute_id [ 522.154865] env[62405]: WARNING nova.compute.manager [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Compute nodes ['7d5eded7-a501-4fa6-b1d3-60e273d555d7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 523.159951] env[62405]: INFO nova.compute.manager [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 524.164812] env[62405]: WARNING nova.compute.manager [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 524.165160] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.165354] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.165508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.165659] env[62405]: DEBUG nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 524.166586] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a2649e-9944-4855-a263-d46072fcf6ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.174544] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece56292-e166-4695-8898-4506c546a202 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.188658] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab4a7cc-14f9-4d08-8f9f-6a9725ec4909 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.194851] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b82eac0-57d0-4470-8e59-e3d4eee6fb29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.222299] env[62405]: DEBUG nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181325MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 524.222447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.222620] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.725161] env[62405]: WARNING nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] No compute node record for cpu-1:7d5eded7-a501-4fa6-b1d3-60e273d555d7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 7d5eded7-a501-4fa6-b1d3-60e273d555d7 could not be found. [ 525.228871] env[62405]: INFO nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 [ 526.736639] env[62405]: DEBUG nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 526.737057] env[62405]: DEBUG nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 526.892845] env[62405]: INFO nova.scheduler.client.report [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] [req-9a771dae-9437-4ab2-920e-09d22a08eb4f] Created resource provider record via placement API for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 526.909559] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d116651-786b-4244-a7ff-3635449be64e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.917120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553d6e58-0132-448c-84fa-cad7aae328f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.946743] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83b4923-0658-4e45-8ef1-73a56378bc46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.954130] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d11cae-bee1-420b-ba17-f3bc10b70006 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.967258] env[62405]: DEBUG nova.compute.provider_tree [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 527.505583] env[62405]: DEBUG nova.scheduler.client.report [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 527.505804] env[62405]: DEBUG nova.compute.provider_tree [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 0 to 1 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 527.505945] env[62405]: DEBUG nova.compute.provider_tree [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 527.551301] env[62405]: DEBUG nova.compute.provider_tree [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Updating 
resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 1 to 2 during operation: update_traits {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 528.056403] env[62405]: DEBUG nova.compute.resource_tracker [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 528.056704] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.834s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.056786] env[62405]: DEBUG nova.service [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Creating RPC server for service compute {{(pid=62405) start /opt/stack/nova/nova/service.py:186}} [ 528.071020] env[62405]: DEBUG nova.service [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] Join ServiceGroup membership for this service compute {{(pid=62405) start /opt/stack/nova/nova/service.py:203}} [ 528.071229] env[62405]: DEBUG nova.servicegroup.drivers.db [None req-1b9cad90-3198-4103-9169-30e8fd6e14bd None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62405) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 588.072297] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.072794] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.072794] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 588.072923] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 588.579542] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 588.579782] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.580041] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.580240] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.580426] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.580607] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 588.580782] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_power_states {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.083918] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Getting list of instances from cluster (obj){ [ 589.083918] env[62405]: value = "domain-c8" [ 589.083918] env[62405]: _type = "ClusterComputeResource" [ 589.083918] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 589.085228] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0c8549-1c09-4e65-a801-768d0772e013 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.093556] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Got total of 0 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 589.093771] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.093973] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 589.094164] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 589.597093] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.597335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.597500] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.597651] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 589.598550] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e9e46b-faa2-4582-99f7-72ec27b030ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.606758] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c113b3ac-1d3d-423f-9c2c-0aa8ff1686e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.620196] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708d93d9-fe23-4951-8c4c-af16bf79d2f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.626295] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc14a54e-a6a3-4996-be7e-5fe1f4d5159c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.654673] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181339MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 589.654829] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.654993] 
env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.672935] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 590.673216] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 590.689055] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03bcd98-841a-41dc-be9b-0364a53a8e0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.696828] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c465c07e-1f8f-4360-aa6a-f5396ddefefb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.725649] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4225fbae-69b0-4991-9e8f-44202fc05604 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.733089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f031ef-bdc5-45f2-838b-c1c8b402585b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.746041] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.249517] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 591.755168] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 591.755631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.100s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.755631] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.755938] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Getting list of instances from cluster (obj){ [ 591.755938] env[62405]: value = "domain-c8" [ 591.755938] env[62405]: _type = "ClusterComputeResource" [ 591.755938] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 591.756964] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0f11df-dd7e-4929-9a3d-ce21426e93ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.765669] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Got total of 0 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 640.089701] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.090161] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.598022] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 640.598022] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 640.598022] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 641.101741] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 641.102189] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102189] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102304] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102388] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102527] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102664] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.102791] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 641.102926] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.606389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.606628] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.606793] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.606940] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 641.607843] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d86ec59-c49e-4f06-862c-a988c8f4184d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.615787] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f40cc4e-7995-486e-aae1-ecc553aba7b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.629098] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3102c5-4dd9-4baf-bf34-2bee5f89e2f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.635020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0d6c80-0618-4903-a60c-e5d1f06e61ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.662270] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181334MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 641.662394] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.662577] 
env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.680983] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 642.681290] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 642.694414] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e59376-dfb1-43aa-9e7b-4bdb3422a433 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.703814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e8ed2d-fd65-488c-b602-e9e883d3b6e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.736645] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b96a4ac-dbac-4c17-b7e0-ec43113f2f38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.744344] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fecd9e1-273a-444a-a23c-0433e286ffec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.757352] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.260319] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.261635] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 643.261819] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.599s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.263394] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.263739] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.263739] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 703.263802] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 703.766527] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 703.766786] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.766906] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.767073] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.767227] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.767374] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.767509] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 703.767634] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 703.767770] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 704.271092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.271471] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.271510] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.271659] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 704.272568] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8463783-778c-4fcc-a596-e5239f9dcba9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.280537] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db13e64-b025-4edb-943a-f99cd060e262 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.294367] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06704d74-b395-4150-afae-fa262342bb7c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.300380] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6400d9e-ae99-4ccf-b171-72ab26c33616 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.328102] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181329MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 704.328252] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.328436] 
env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.346503] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 705.346748] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 705.359910] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cb2698-bae1-43c0-8865-d78f6e1cfa0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.367480] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ae9a3a-251f-4d4d-861e-8875d65469e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.396767] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d46f83f-36b0-4478-8ac3-999bb2032e0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.403268] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17dd2099-be47-45ba-b8a1-9dbc7198a03e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.415504] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.918671] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.919941] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 705.920136] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.054986] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.055458] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.560237] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 761.560237] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 761.560237] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 762.062273] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 762.062758] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.062925] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.063248] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.063536] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.063794] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.064075] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.064315] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 762.064557] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.567682] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.567920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.568150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.568287] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 762.569162] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b1e34f-b6ca-42f9-b803-6dbad51059a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.578541] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f58502-3159-4fcc-89a1-ea534f7f1d01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.591873] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f22e30-b7cf-44a5-9d6d-9eae170c2c6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.597843] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b94a9a-39c6-4df1-b602-62eeeee921b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.625015] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181341MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 762.625156] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.625413] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.643636] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 763.643915] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 763.658206] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad982aff-f151-4195-81ab-87c5d79a8d2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.665358] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b575ae-a7b9-4d1c-8f54-0dddfcb1bc6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.694112] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d90c2c-6fa2-43c8-a199-9b54d2ccebfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.700616] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d35334-a445-4245-a2d9-8d8b1c984108 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.712706] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.216074] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.217340] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 764.217523] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.401372] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.401767] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 816.909432] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 0 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 816.909671] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.909809] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 817.412659] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.916734] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.916734] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.916734] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.916734] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.916734] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 820.401814] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.401667] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.402078] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 821.402078] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 821.904703] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 822.401467] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.401726] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.401941] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 822.905540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.905803] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.905985] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.906161] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 822.907069] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a6ea87-034b-47e9-9727-386d56d14cbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.915023] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602b4879-58c6-41dc-84e7-66891716d9f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.929827] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5d358b-7bc3-4cc0-9abf-8ae372994eae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.935958] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551439ec-b817-4e25-84bb-a4601b15c69d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.963458] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181337MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 822.963634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.963834] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.984181] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 823.984495] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 823.999941] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560d7c68-b17f-44c1-9c42-6b578f42c98d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.007449] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1aaa126-6f48-47bd-906a-547571e95baa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.036504] env[62405]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a846125c-983f-470e-9488-ea58731eeea2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.043345] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38073fe-792c-4a3c-ad7f-eb13535d662e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.055530] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.558773] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.560098] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 824.560285] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.560689] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.561182] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.561182] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.561254] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.561387] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 881.401914] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.402224] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 882.906026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.906326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.906508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.906662] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 882.907575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bc68e7-1f2e-44c7-83dc-f09da2424728 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.915622] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58522c5-cbca-4615-b92a-76a7493685b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.929267] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b94f94-da60-4aa2-adb7-52c96e421b1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.935084] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3771d08c-9eee-47d5-b6fc-d3589d6aaf99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.964113] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181333MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 882.964291] env[62405]: DEBUG oslo_concurrency.lockutils 
[None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.964475] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.997432] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 883.997801] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 884.013926] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 884.026095] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 884.026308] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.037740] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 884.052636] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 884.063302] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0809ef-5765-4ef8-b5d7-e617e3e0e184 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.070284] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b85d69-3466-42bf-8407-d9d2579a8b1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.098890] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752a24af-6d47-4194-994f-553c680944bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.105571] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17409063-803a-4ca0-a54c-7f0ddcd8ee9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.118141] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.621102] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 884.622361] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 884.622542] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.658s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.617858] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 885.617858] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 886.121345] 
env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 886.121345] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 886.121600] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 886.624225] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 886.624668] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.401997] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.402410] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.402410] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.401154] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.401561] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 942.402646] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.400983] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.401234] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 943.904133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.904510] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.904550] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.904681] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 943.905585] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94b0bf8-9b31-4870-af97-dd4ec4fbd61a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.913716] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a1893c-24b5-4ec8-abc0-7cc7ac65bd44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.927170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91023ef-5d93-47bc-b806-73420377b69a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.933094] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44504012-e08e-47f4-b885-5fd2a0a66724 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.962110] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181332MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 943.962348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.962562] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.980593] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 944.981170] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 944.994674] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1386491-23c2-46bf-892d-bfd37a5d6b98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.001988] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26940fe-b329-4951-ae0e-ce78ff047c02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.030518] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a628d7-c0fd-4ce2-aac4-61643f22b3cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.036689] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4c37e4-c5ee-4918-aacb-5a945f361fc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.049710] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.552731] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.554063] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 945.554243] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.548846] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.549246] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 946.549510] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 946.550210] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 947.053450] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 998.401241] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 999.401628] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 999.402027] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.401967] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.401967] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.401967] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1004.401633] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1004.401807] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1004.905076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.905438] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.905479] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.905631] env[62405]: DEBUG nova.compute.resource_tracker [None 
req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1004.906502] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3596676c-d5dc-4233-9140-ee1d44461c34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.914524] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9d2445-8604-458b-bafb-bdd723c3795d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.927992] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b40c1ea-28a8-4c48-9662-a661e744640a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.933894] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3215b292-ee3a-4cd4-b5fc-d083a0d5671a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.963107] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1004.963307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.963459] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.981534] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1005.981788] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1005.994824] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581e4d21-6426-46b2-9e25-26173e8586b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.002389] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06dc571-7ff5-4844-b982-48d865006491 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.030120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90e414b-c055-4d3e-bc10-c688f5a8efb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.036635] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e07f69-b3b1-4dc2-a800-cb68e80c8a52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.049057] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.552365] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.553634] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1006.553816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.548793] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1008.549250] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.054062] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1009.054247] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1009.054335] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1009.559270] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1059.402438] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.401740] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1060.401966] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.400847] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.401220] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1064.402752] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.401698] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.402819] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.402819] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1066.402819] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1066.904862] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1066.905193] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.408126] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.408567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.408686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.408782] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1067.409664] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd967ef-e3ed-4b80-9e77-4d295cf8b54f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.417821] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efac202c-ba71-4b35-8503-d2b6602552a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.431389] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163528db-d104-4a8d-a3aa-3e185457e68f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.437231] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8839d6-dcb8-45bb-b9ed-fbf574eb9b6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.465251] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1067.465377] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1067.465567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.485267] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1068.485543] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1068.499566] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bef863-7b13-49d5-9c36-ef3220e00a20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.507550] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b687f8f2-5f83-4e77-be3b-35581ae93e93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.536654] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21d5cb8-f71b-4c55-b83e-1c250c3afd39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.543666] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f018954-3b2e-4854-a2ef-dea763f85284 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.556220] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.059128] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.060301] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1069.060657] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.055023] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.404245] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.404691] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 1120.904716] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1120.904716] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.403064] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.403064] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.403557] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 1122.906050] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 0 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 1124.904979] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.905365] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1125.402398] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.402178] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.402634] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.905711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.905959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.906137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.906293] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1126.907230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cc5374-f20a-4490-b619-055100107d4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.914949] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7e6c22-2803-408c-bbd5-cbaf1b8b5a3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.929168] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c62be08-ae82-4b79-b42b-aa51eb5ca339 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.935076] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5cbd4d-330b-430d-8351-2fef6ccd1c38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.962954] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181333MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1126.963130] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.963307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.982467] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1127.982713] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1127.995273] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929b880d-45b3-41d9-a55d-88ddf334cbff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.002424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08daffb6-58d0-449a-bf67-9f8a2210af18 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.030322] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe53b64-a3f3-484e-a4f6-35c461140e5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.036682] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76542884-05dd-4e16-a721-bd466efa4c0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.048900] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.551997] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.553269] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1128.553456] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.553668] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.055496] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.055874] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1130.055874] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1130.056062] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1130.558545] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1132.899766] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.403763] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.402177] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.401730] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.402151] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1184.402151] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1185.401833] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1186.401604] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.401253] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.401625] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1188.401625] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1188.904442] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1188.904649] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_power_states {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.407691] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Getting list of instances from cluster (obj){ [ 1189.407691] env[62405]: value = "domain-c8" [ 1189.407691] env[62405]: _type = "ClusterComputeResource" [ 1189.407691] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1189.408791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da84ac6a-28f8-4387-8774-ed425b907463 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.417469] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Got total of 0 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1189.417697] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.921425] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.921699] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.921828] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.921980] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1189.922846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a23f95-b6f6-4055-9f67-da7ea667da59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.930803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1c22c5-863f-4ff8-a75b-1a35c75b6cec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.944981] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090bf69d-f2b7-4ee1-bee5-1287664a5ffa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.951264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500a127c-0b72-455a-917e-ed50a1b6cd51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.980031] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181333MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1189.980171] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1189.980335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.096633] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1191.096920] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1191.111998] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1191.122469] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1191.122634] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1191.131215] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1191.145152] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1191.155544] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af50f16-e85d-43f6-a880-f3ea70943725 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.162952] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7072a3ec-8d5d-4cda-83db-0164668581db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.192653] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f47d08-c967-407b-8601-f6496297933a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.199468] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e271f5-beb5-4db0-acd6-144d05f844f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.211883] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.715228] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.716516] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1191.716701] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.736s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.711884] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.401680] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.402714] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.400617] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.400839] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.401038] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.401207] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1248.401290] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.401668] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1248.401668] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1248.904178] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1248.904402] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.905586] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.411021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.411021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.411021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.411021] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1249.411021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b93a2e-f127-47e5-87a1-f07e068fb6b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.418751] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38d90bf-f48e-4192-9b3e-acd65efb13d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.432612] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6e7e41-db63-49da-b555-9a5e4b0a7d9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.439573] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ab86c0-c9fb-4951-a420-af37ee34ba20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.467713] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181325MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1249.469564] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.469564] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.487925] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1250.488193] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1250.501397] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51a2223-61bc-46e2-9067-dd91a7c755bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.509593] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce6aae9-8c8d-444e-95a4-f0b678ab751a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.537312] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7c2086-c4e9-4399-8263-8b188bba9213 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.543850] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd37e6-f71c-4157-8b6c-61aed88a5547 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.556065] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.059148] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1251.060476] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1251.060658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.055279] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.396765] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.402539] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.403319] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.401650] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.401435] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.401886] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.401886] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1308.402310] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.401291] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.401638] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1310.401638] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1310.904342] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1310.904583] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.408289] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.408734] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.408734] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.408838] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1311.409729] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a684a8-d434-4338-832c-e3ccaadc0e27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.417907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb0fcd1-1d94-4d69-816f-b8733aff97f7 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.431477] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71260e3d-7d1d-4e48-a773-5efd61bdb9ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.437481] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c8ef70-bfdc-4c63-bc10-df8d9c9ee9fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.465238] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181315MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1311.465382] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.465569] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.484945] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1312.485228] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1312.497919] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ac1142-2165-491d-b04d-c20af801ba83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.505517] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cace75e-b086-4478-9330-3001d9f899be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.535238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa4e7ed-c627-4237-bb6c-2b539d72b346 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.542019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caebad22-98dc-4d2b-acf9-52419ef60a0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.554437] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None 
None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.058070] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1313.059355] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1313.059535] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.054415] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1361.401388] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1366.404145] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.401901] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.402127] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1368.401354] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.402162] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.402635] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1370.402262] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.402668] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1370.402668] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1370.906195] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1371.401791] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.905373] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.905762] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.905842] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.905963] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1371.906901] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b444d1b2-673b-46d7-8a8b-3de2fabeff3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.914918] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0974cf-58b0-4370-9ef7-fe6ecee73d50 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.929356] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36c0f77-27b2-47a5-b5a4-b4332e5a04b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.935712] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed9d145-9236-4e37-a06d-2b3edcc9858d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.963709] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181335MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1371.963943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.964143] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.982923] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1372.983193] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1372.997351] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203afdda-767a-4d46-90da-47082fc413a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.004784] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2d7ec6-4d6a-4cc6-a475-846355270b5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.034241] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3023f21-7bc0-4b6b-be2d-b97ea657a84a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.040912] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436f1e26-3161-4ce1-a2ee-be03ba7174b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.053262] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None 
None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1373.556806] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.558175] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1373.558362] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.553831] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.397834] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.404020] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.404460] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 1422.906609] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.402233] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.402717] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.402717] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 1427.905550] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 0 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 1428.905740] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.402171] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.402219] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.402121] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.402390] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1431.402617] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1431.905791] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1431.906066] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.906235] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1433.401219] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.904954] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.905229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.905404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.905559] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1433.906435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6237993c-300e-4d59-8090-78eff2adfcf5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.914271] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107854d0-98cc-4568-b93f-c2b45d4f9365 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.927593] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256bd0cd-d8a4-464a-8fa4-3e1a13797fff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.933427] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f45c25-5d2f-4836-9e47-0f90ae1640f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.960563] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181328MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1433.960692] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1433.960888] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.978266] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1434.978521] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1434.991312] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2396d132-08b7-4f5f-9986-009019a05bcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.998223] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c0a16a-022a-4e86-9732-7d1b22975621 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.026473] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca12c13-c6d0-4b36-a383-dadf720a0461 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.032682] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee3609e-f77d-44e0-ae85-e57b69fff2e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.044695] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.547510] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1435.548799] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1435.548984] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.588s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.544733] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.401696] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.309168] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "2257c786-54f9-441a-832c-cf3178bfcc78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.309467] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.815020] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1464.032934] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "3c0b964f-c900-4704-ae12-7eba7952f678" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.033914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.367508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1464.371231] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.375918] env[62405]: INFO nova.compute.claims [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1464.536674] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1465.064504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.248560] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "6199de01-baca-4461-9572-111eda11adac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.248789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.450900] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981a5bcc-0fdf-4d2a-aff9-a9cbced8b4d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.463899] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4f6477-804a-4368-a5d8-f34acfd0ba14 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.507115] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467ed751-1e92-4827-8efa-20f7a9eeb9fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.515687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6a8a56-a40e-4eb4-ab91-f96bb3977bcd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.531695] env[62405]: DEBUG nova.compute.provider_tree [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.753158] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1466.034907] env[62405]: DEBUG nova.scheduler.client.report [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1466.294094] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.542110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.542950] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1466.545858] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.481s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.547976] env[62405]: INFO nova.compute.claims [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1467.059009] env[62405]: DEBUG nova.compute.utils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1467.064286] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Not allocating networking since 'none' was specified. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1467.435881] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.436157] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.565241] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1467.674406] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0504b8-f9b5-4353-8ad0-7b233a8125e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.685659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d932fbf7-5839-4975-93ec-a8ffc03b7069 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.723524] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e1a4bd-f4d6-4975-bf67-443dcf64b042 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.731607] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb48d14a-91b6-4675-9a21-d11ecbbc76fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.747158] env[62405]: DEBUG nova.compute.provider_tree [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.941675] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1468.251469] env[62405]: DEBUG nova.scheduler.client.report [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1468.474132] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.580729] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1468.762567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.763110] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1468.768729] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.476s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.770774] env[62405]: INFO nova.compute.claims [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1469.272022] env[62405]: DEBUG nova.compute.utils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1469.272235] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1469.272527] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1469.422290] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1469.423191] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1469.423394] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1469.423596] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1469.423741] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1469.424249] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1469.424496] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1469.425119] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1469.425444] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1469.425668] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1469.425875] env[62405]: DEBUG nova.virt.hardware [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1469.427198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04bb102-5f21-430f-9eb8-8c5277910523 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.436913] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d3b1c8-19ec-424c-a16d-84a31ba1c83d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.453709] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec55b02-1e0b-4f91-813c-50526d866826 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.473849] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1469.495107] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1469.495107] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdded8b8-7c94-4fbb-9513-eb8f0ca83aae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.510438] env[62405]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1469.510739] env[62405]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62405) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1469.511013] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1469.511211] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating folder: Project (3459296fe9f7486f947a1712001bab8b). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1469.511462] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4f98e0a-fbff-420d-8079-bd0d9c613f82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.520830] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Created folder: Project (3459296fe9f7486f947a1712001bab8b) in parent group-v401284. [ 1469.521619] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating folder: Instances. Parent ref: group-v401288. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1469.521619] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4c7b73d-5094-4d4e-82d1-9fcfcd42b065 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.532349] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Created folder: Instances in parent group-v401288. [ 1469.532691] env[62405]: DEBUG oslo.service.loopingcall [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1469.532905] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1469.533854] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc97661b-0899-45da-b973-989a63c79ba6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.557886] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1469.557886] env[62405]: value = "task-1946682" [ 1469.557886] env[62405]: _type = "Task" [ 1469.557886] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.568698] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946682, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.779873] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1469.835936] env[62405]: DEBUG nova.policy [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a03cda992624be38a1cb5e69294f36e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44c95dbb17d74a7f9f152a3f06fc5336', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1469.894982] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3ac82c-13bc-4330-8885-57614b062880 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.902399] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1863d658-05f9-4592-ab15-d9b2f40b28fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.933874] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3531bf7-09ce-4d28-a064-92f187ce12b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.944694] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dc3c2e-28e1-4b8d-aec0-c1a10be20748 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.963593] env[62405]: DEBUG nova.compute.provider_tree [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.068363] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946682, 'name': CreateVM_Task, 'duration_secs': 0.290456} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.068615] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1470.069596] env[62405]: DEBUG oslo_vmware.service [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0b7072-0431-40ea-8070-d79a50ba45f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.075786] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.076096] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.076781] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1470.077037] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f533ab-e0b3-4738-acaf-bbf64b676608 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.082975] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1470.082975] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f899a7-b840-f567-cc3f-f87fc8c62267" [ 1470.082975] env[62405]: _type = "Task" [ 1470.082975] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.094047] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f899a7-b840-f567-cc3f-f87fc8c62267, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.468506] env[62405]: DEBUG nova.scheduler.client.report [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1470.601261] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.601261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1470.601744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.603044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.603044] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1470.603044] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a87b98c3-4060-4736-b44c-ae7d667989cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.611214] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1470.611428] 
env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1470.613155] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9c13d8-e61c-4602-bf80-160361ee4580 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.620296] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1493b24-e09e-40f8-a20a-0825acdecd7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.626177] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1470.626177] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5298fa4b-be7b-76bc-829a-a4e8a3a09c69" [ 1470.626177] env[62405]: _type = "Task" [ 1470.626177] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.634574] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5298fa4b-be7b-76bc-829a-a4e8a3a09c69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.793031] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1470.820139] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1470.820139] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.820139] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1470.820139] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.820542] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1470.821138] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1470.821448] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1470.822036] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1470.822264] env[62405]: DEBUG nova.virt.hardware [None 
req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1470.822640] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1470.822898] env[62405]: DEBUG nova.virt.hardware [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1470.823841] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b55e938-9761-4938-81ab-373d5c8d780f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.832890] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3566c661-f73b-4f9a-824b-f750cece6eb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.976037] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.976037] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1470.978965] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.505s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.980988] env[62405]: INFO nova.compute.claims [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.138621] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1471.138855] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating directory with path [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.139722] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9a63b76-3a0a-48b9-a6fa-a7433fbdb1c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.178018] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Created directory with path [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.178018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Fetch image to [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1471.178018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Downloading image file data e6bba7a8-c2de-41dc-871a-3859bba5f4f9 to [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk on the data store datastore1 {{(pid=62405) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1471.178018] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d100839-5323-421b-bbce-ee71113bb212 {{(pid=62405) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.186370] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8c609c-e227-4345-89d5-b35235123af1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.196690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc92053-5ce8-49be-9d5b-36d66d4c81f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.234209] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc28b900-eda0-41c4-8533-05a2f43feff2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.241052] env[62405]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a3662865-32fd-46fe-9e3a-4897cbb46d29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.331086] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Downloading image file data e6bba7a8-c2de-41dc-871a-3859bba5f4f9 to the data store datastore1 {{(pid=62405) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1471.396319] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1471.486197] env[62405]: DEBUG nova.compute.utils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1471.492810] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1471.492810] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1471.825716] env[62405]: DEBUG nova.policy [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd97ce1e0a1d54796a96d5b48b15f8a92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21cc9ecf7d5e4a5c80b8febb406cd6d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1471.991994] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1472.084971] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1472.084971] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1472.122505] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Successfully created port: ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.135169] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614c61d0-e03f-40a6-97c9-f02cfc1d7f56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.143135] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e612e3-620a-49f4-8295-6697666fff21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.186133] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ee27cc-6e45-45b9-8ca2-54ffee037769 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.196278] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4b16f9-d232-4ef0-b009-cac4a5a9daf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.212258] env[62405]: DEBUG nova.compute.provider_tree [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.242259] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Downloaded image file data e6bba7a8-c2de-41dc-871a-3859bba5f4f9 to vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk on the data store datastore1 {{(pid=62405) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1472.242259] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1472.242259] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Copying Virtual Disk [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk to [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1472.243033] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72f2f901-0616-4f8d-bfe6-294689a5c67a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.252443] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1472.252443] env[62405]: value = "task-1946684" [ 1472.252443] env[62405]: _type = "Task" [ 1472.252443] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.266533] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.624163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "8624629d-642a-4adf-984e-3925beeb4fef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.624414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.721266] env[62405]: DEBUG nova.scheduler.client.report [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1472.766046] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946684, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.019847] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1473.047450] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1473.047703] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.047862] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1473.048059] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.048211] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1473.048360] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1473.051099] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1473.051298] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1473.051485] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea 
tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1473.051677] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1473.051844] env[62405]: DEBUG nova.virt.hardware [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1473.052746] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e76defd-3513-4a8d-8a7a-bd1d66a4fb10 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.061299] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f597f12-40f1-4f04-b2d3-261b6691da78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.127260] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1473.228840] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.229394] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1473.266376] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685491} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.266554] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Copied Virtual Disk [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk to [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1473.268387] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleting the datastore file [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/tmp-sparse.vmdk {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.268387] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6104bbce-3e23-46c6-a4f5-7924cb02af6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.275888] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1473.275888] env[62405]: value = "task-1946686" [ 1473.275888] env[62405]: _type = "Task" [ 1473.275888] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.290407] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946686, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.564281] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Successfully created port: 9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1473.669395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.670034] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.671413] env[62405]: INFO nova.compute.claims [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.740185] env[62405]: DEBUG nova.compute.utils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1473.741566] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1473.741730] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1473.787010] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026225} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.787378] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1473.787503] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Moving file from [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f/e6bba7a8-c2de-41dc-871a-3859bba5f4f9 to [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9. {{(pid=62405) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1473.788017] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-61d92f19-6eab-4dbe-a975-ce311f6ae49e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.795504] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1473.795504] env[62405]: value = "task-1946687" [ 1473.795504] env[62405]: _type = "Task" [ 1473.795504] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.804195] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946687, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.822264] env[62405]: DEBUG nova.policy [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd25c41c232349ef87887a4285b71767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d1aee7c44f44abc86ed5c15b027e989', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1474.249885] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1474.308199] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946687, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028071} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.308199] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] File moved {{(pid=62405) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1474.308199] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Cleaning up location [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1474.308358] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleting the datastore file [datastore1] vmware_temp/7e04c175-8f0f-43b3-913e-1f049d10bf6f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1474.308543] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-606b5741-a898-4a38-9721-8dd5a11b631b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.318776] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.319376] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.320709] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1474.320709] env[62405]: value = "task-1946688" [ 1474.320709] env[62405]: _type = "Task" [ 1474.320709] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.332168] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.408438] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Successfully created port: 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1474.823589] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1474.847032] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024621} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.847032] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1474.847624] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c24d1ff2-ed9f-4663-ba00-91a1ea521893 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.855458] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1474.855458] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52484151-4b1b-aa8e-9288-54ff008e03fe" [ 1474.855458] env[62405]: _type = "Task" [ 1474.855458] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.871374] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52484151-4b1b-aa8e-9288-54ff008e03fe, 'name': SearchDatastore_Task, 'duration_secs': 0.008442} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.871650] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.871903] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2257c786-54f9-441a-832c-cf3178bfcc78/2257c786-54f9-441a-832c-cf3178bfcc78.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1474.872179] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-707de154-a8a7-4560-97d2-17d06a1e1ee3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.882249] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1474.882249] env[62405]: value = "task-1946689" [ 1474.882249] env[62405]: _type = "Task" [ 1474.882249] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.892095] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946689, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.907297] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26ad435-5497-4dc5-a38a-b20b54d69864 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.919152] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd733698-b982-4a10-a36e-5d0173e4f1ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.955748] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dff939c-fe11-4561-b073-5c2485cd1a9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.963927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac694668-e983-4416-918a-680094ab4668 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.984221] env[62405]: DEBUG nova.compute.provider_tree [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.138092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.138092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.266587] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1475.308711] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1475.309121] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.311223] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1475.311223] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.311223] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1475.311223] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1475.311223] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1475.311481] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1475.312341] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1475.312341] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1475.312341] env[62405]: DEBUG nova.virt.hardware [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1475.313199] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac96dd1-82a9-462e-853c-97a24140344a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.321923] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572df6d9-0626-4fe7-a42f-10eae394ca39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.359445] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.396585] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453205} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.396585] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2257c786-54f9-441a-832c-cf3178bfcc78/2257c786-54f9-441a-832c-cf3178bfcc78.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1475.396585] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1475.396585] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00afa009-9e81-45ec-8683-3d3631281b20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.406262] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1475.406262] env[62405]: value = "task-1946691" [ 1475.406262] env[62405]: _type = "Task" [ 1475.406262] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.415118] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.487637] env[62405]: DEBUG nova.scheduler.client.report [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1475.566458] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Successfully updated port: ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1475.641178] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1475.917617] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060273} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.917895] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1475.920032] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426dc818-7a93-48ef-a904-4f69a08b4c10 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.947203] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 2257c786-54f9-441a-832c-cf3178bfcc78/2257c786-54f9-441a-832c-cf3178bfcc78.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1475.947871] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d45db82c-2845-46b2-a7aa-057a027dbb7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.970797] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1475.970797] env[62405]: value = "task-1946692" [ 1475.970797] env[62405]: _type = "Task" [ 1475.970797] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.982835] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946692, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.994948] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.995552] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1475.998769] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.639s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.000328] env[62405]: INFO nova.compute.claims [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.076705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.076860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquired lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.077020] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1476.104213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.106317] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.264557] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.481605] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946692, 'name': ReconfigVM_Task, 
'duration_secs': 0.251669} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.483170] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 2257c786-54f9-441a-832c-cf3178bfcc78/2257c786-54f9-441a-832c-cf3178bfcc78.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1476.483932] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee4b437-770d-48cf-91b0-2a2dfbd1f929 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.491652] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1476.491652] env[62405]: value = "task-1946693" [ 1476.491652] env[62405]: _type = "Task" [ 1476.491652] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.502170] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946693, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.506047] env[62405]: DEBUG nova.compute.utils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1476.510127] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1476.511302] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1476.610290] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1476.745011] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1476.773580] env[62405]: DEBUG nova.policy [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1477.005618] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946693, 'name': Rename_Task, 'duration_secs': 0.135005} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.005884] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.006205] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c65d372-49de-4695-b0a1-c87a4dabbc76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.013858] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1477.022028] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1477.022028] env[62405]: value = "task-1946694" [ 1477.022028] env[62405]: _type = "Task" [ 1477.022028] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.030943] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946694, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.128338] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.214431] env[62405]: DEBUG nova.compute.manager [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Received event network-vif-plugged-ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1477.214640] env[62405]: DEBUG oslo_concurrency.lockutils [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] Acquiring lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.214743] env[62405]: DEBUG oslo_concurrency.lockutils [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] Lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.215333] env[62405]: DEBUG oslo_concurrency.lockutils [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] Lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.218606] env[62405]: DEBUG nova.compute.manager [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] No waiting events found dispatching network-vif-plugged-ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1477.220112] env[62405]: WARNING nova.compute.manager [req-5cf62f56-ec90-468d-be15-eb30178441bc req-a2d6234c-4904-40a1-bdd9-9996a567e4b5 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Received unexpected event network-vif-plugged-ff5be597-1e44-4215-81eb-9129935b393c for instance with vm_state building and task_state spawning. 
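The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" entries around this point (for "compute_resources" and the per-instance "-events" locks) come from oslo.concurrency's lockutils wrapper around a synchronized section. A minimal sketch of that pattern follows; the decorator and context manager are the real oslo.concurrency API, but the claim body and the instance UUID argument are placeholders for illustration, not Nova's actual ResourceTracker code.

    # Sketch of the locking pattern behind the "Acquiring lock ... / acquired ...
    # waited / released ... held" DEBUG entries above (placeholder body, not Nova code).
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the "compute_resources" lock held; lockutils itself logs the
        # acquire, wait and hold durations that appear in this log.
        print('claiming resources for %s' % instance_uuid)

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # critical section

    instance_claim('0491dc4b-cf35-4035-aca9-baf43b86af7e')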
[ 1477.223812] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b7eddd-84a2-4532-9edc-773c202de253 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.240121] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ceed198-85aa-43d3-982c-fa4d33c1a0c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.281026] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca17b37d-5db0-452f-9a24-d4bf4c2855bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.288584] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c7d581-7859-4354-9baa-d045b1f199aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.304328] env[62405]: DEBUG nova.compute.provider_tree [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.443472] env[62405]: DEBUG nova.network.neutron [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Updating instance_info_cache with network_info: [{"id": "ff5be597-1e44-4215-81eb-9129935b393c", "address": "fa:16:3e:17:77:42", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.35", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff5be597-1e", "ovs_interfaceid": "ff5be597-1e44-4215-81eb-9129935b393c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.541270] env[62405]: DEBUG oslo_vmware.api [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946694, 'name': PowerOnVM_Task, 'duration_secs': 0.444462} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.541824] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1477.542090] env[62405]: INFO nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Took 8.96 seconds to spawn the instance on the hypervisor. [ 1477.542486] env[62405]: DEBUG nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1477.543681] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8c55fe-cf41-44a1-9cdf-a678df8394ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.699294] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "801e7086-5742-4a04-962c-7546284aa12d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.699522] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.808345] env[62405]: DEBUG nova.scheduler.client.report [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1477.882913] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Successfully updated port: 9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1477.949758] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Releasing lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.950503] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Instance network_info: |[{"id": "ff5be597-1e44-4215-81eb-9129935b393c", "address": "fa:16:3e:17:77:42", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.35", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff5be597-1e", "ovs_interfaceid": "ff5be597-1e44-4215-81eb-9129935b393c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1477.950802] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:77:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff5be597-1e44-4215-81eb-9129935b393c', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1477.961423] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Creating folder: Project (44c95dbb17d74a7f9f152a3f06fc5336). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.962189] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f37e2e83-ee18-4bf3-ad35-fdf8528c31f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.974195] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Created folder: Project (44c95dbb17d74a7f9f152a3f06fc5336) in parent group-v401284. 
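The instance network_info blob logged above is just a list of VIF dicts, so the fields that usually matter when debugging (port UUID, MAC address, fixed IPs) can be pulled out with plain dictionary traversal. A minimal sketch using the same structure, with the values abridged from the cache-update entry above:

    # Extract port id, MAC and fixed IPs from one network_info VIF entry
    # (structure and values abridged from the log entry above).
    vif = {
        "id": "ff5be597-1e44-4215-81eb-9129935b393c",
        "address": "fa:16:3e:17:77:42",
        "devname": "tapff5be597-1e",
        "network": {
            "id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43",
            "label": "shared",
            "subnets": [
                {"cidr": "192.168.233.0/24",
                 "ips": [{"address": "192.168.233.35", "type": "fixed", "version": 4}]},
            ],
        },
    }

    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    print(vif["id"], vif["address"], fixed_ips)  # port UUID, MAC, ['192.168.233.35']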
[ 1477.977973] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Creating folder: Instances. Parent ref: group-v401292. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1477.977973] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e91bea6d-cb71-45f0-b72f-6360c2c47859 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.986163] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Created folder: Instances in parent group-v401292. [ 1477.987221] env[62405]: DEBUG oslo.service.loopingcall [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1477.987221] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1477.987221] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d13b60ae-d528-4b19-90f1-d14efbcdf908 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.011890] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.011890] env[62405]: value = "task-1946698" [ 1478.011890] env[62405]: _type = "Task" [ 1478.011890] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.021804] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946698, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.037569] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1478.075569] env[62405]: INFO nova.compute.manager [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Took 13.75 seconds to build instance. 
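The CreateVM_Task entries just above follow the same shape as every other vCenter task in this log: a task reference is returned, polled while it reports "progress is N%", and finally logged as "completed successfully" with a duration. A generic sketch of that poll-until-done loop is below; get_task_state is a hypothetical stand-in for the task-info read that oslo.vmware performs on each poll (the library exposes the whole loop as VMwareAPISession.wait_for_task), and the interval and timeout are illustrative values, not the driver's configuration.

    import time

    def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds or fails.

        get_task_state is a hypothetical callable returning (state, progress);
        it stands in for the task-info read oslo.vmware does on each poll.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = get_task_state()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('vCenter task failed')
            # Corresponds to the "Task: {...} progress is N%" entries in the log.
            print('progress is %d%%' % progress)
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)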
[ 1478.081701] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1478.081701] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.081701] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1478.082659] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.082659] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1478.082659] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1478.082659] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1478.082659] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1478.084644] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 
tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1478.084644] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1478.084644] env[62405]: DEBUG nova.virt.hardware [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1478.084644] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5125dc-e506-4da5-96e3-9d8efa4160de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.094053] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956b5755-d013-4ae9-905c-553344603d2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.204036] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1478.316992] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.317544] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1478.323041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.056s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.324619] env[62405]: INFO nova.compute.claims [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1478.386165] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.386332] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquired lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.386469] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1478.526513] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946698, 'name': CreateVM_Task, 'duration_secs': 0.344402} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.526797] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1478.548940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.552024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.552024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1478.552024] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-658b9422-6ff0-4263-ac6e-a96d05f79e33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.558625] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1478.558625] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52385cce-e7b8-2997-e0c9-1d349106d660" [ 1478.558625] env[62405]: _type = "Task" [ 1478.558625] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.573463] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52385cce-e7b8-2997-e0c9-1d349106d660, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.589618] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ecd7d637-e785-4c8b-8c56-16fc9fcaf2c3 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.279s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.672899] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Successfully created port: 15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1478.736421] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Successfully updated port: 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1478.740163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.830600] env[62405]: DEBUG nova.compute.utils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1478.837049] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1478.837049] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1478.890430] env[62405]: DEBUG nova.compute.manager [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Received event network-vif-plugged-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1478.894138] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] Acquiring lock "6199de01-baca-4461-9572-111eda11adac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.894138] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] Lock "6199de01-baca-4461-9572-111eda11adac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.894138] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] Lock "6199de01-baca-4461-9572-111eda11adac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.894138] env[62405]: DEBUG nova.compute.manager [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] No waiting events found dispatching network-vif-plugged-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1478.894138] env[62405]: WARNING nova.compute.manager [req-3d0b20ca-4248-4c55-b6c6-8cdcc30f72ca req-92f1f469-38a4-4094-af39-4a45e9cc667f service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Received unexpected event network-vif-plugged-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 for instance with vm_state building and task_state spawning. [ 1479.075674] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52385cce-e7b8-2997-e0c9-1d349106d660, 'name': SearchDatastore_Task, 'duration_secs': 0.013389} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.076546] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1479.078577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.079597] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.079718] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.080727] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.080727] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.080727] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5faa6557-9855-45e9-832a-0efb62815731 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.098085] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.098085] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.101309] env[62405]: DEBUG nova.policy [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9de144c120964b4db3259caf5dea43f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cf1f39c8aef41df8c86777f80980664', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1479.102781] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1010cbe6-f2c2-4cde-a35b-116602421094 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.115193] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1479.115193] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1db50-6de9-c49e-84ee-63840f5831d4" [ 1479.115193] env[62405]: _type = "Task" [ 1479.115193] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.125983] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1db50-6de9-c49e-84ee-63840f5831d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.241754] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.243032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.243032] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1479.335715] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1479.555936] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf54d67-c20d-4b81-87e8-dd6b1e50ab5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.572871] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf848f9c-e1b3-4548-bfce-73e46a01c5b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.606813] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a30d180-b5c8-47eb-af56-af424d28e74b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.615751] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ce4979-fe11-4765-8e82-f26e31ebb3cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.635284] env[62405]: DEBUG nova.compute.provider_tree [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1479.645929] env[62405]: DEBUG oslo_vmware.api [None 
req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1db50-6de9-c49e-84ee-63840f5831d4, 'name': SearchDatastore_Task, 'duration_secs': 0.037263} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.646837] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4cffa72-a776-4278-8290-38c9283e92a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.653647] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1479.653647] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a32f77-ce2f-f7e1-4a13-c27175125333" [ 1479.653647] env[62405]: _type = "Task" [ 1479.653647] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.658952] env[62405]: DEBUG nova.network.neutron [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Updating instance_info_cache with network_info: [{"id": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "address": "fa:16:3e:69:2e:ac", "network": {"id": "e27eb8f6-2757-46d5-aaee-55a231b6762e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1167248497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21cc9ecf7d5e4a5c80b8febb406cd6d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfd8f38-b1", "ovs_interfaceid": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.665276] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a32f77-ce2f-f7e1-4a13-c27175125333, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.834501] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1480.038616] env[62405]: DEBUG nova.network.neutron [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.165346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Releasing lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.165346] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance network_info: |[{"id": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "address": "fa:16:3e:69:2e:ac", "network": {"id": "e27eb8f6-2757-46d5-aaee-55a231b6762e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1167248497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21cc9ecf7d5e4a5c80b8febb406cd6d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfd8f38-b1", "ovs_interfaceid": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1480.165483] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:2e:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2e8b74b-aa27-4f31-9414-7bcf531e8642', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cfd8f38-b13f-4aae-b836-8df8b8a50eb7', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.173840] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Creating folder: Project (21cc9ecf7d5e4a5c80b8febb406cd6d6). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.175648] env[62405]: ERROR nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [req-9b0ec734-e273-4fb0-9851-4ca20df0a062] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9b0ec734-e273-4fb0-9851-4ca20df0a062"}]} [ 1480.176513] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-973454a7-4f58-4083-a570-3418d9966e6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.191011] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a32f77-ce2f-f7e1-4a13-c27175125333, 'name': SearchDatastore_Task, 'duration_secs': 0.010392} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.191742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.191975] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c0b964f-c900-4704-ae12-7eba7952f678/3c0b964f-c900-4704-ae12-7eba7952f678.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.196778] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34acc4cd-0d43-4bfc-bfe2-9e0c911c8a50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.199494] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1480.199494] env[62405]: value = "task-1946700" [ 1480.199494] env[62405]: _type = "Task" [ 1480.199494] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.202478] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Created folder: Project (21cc9ecf7d5e4a5c80b8febb406cd6d6) in parent group-v401284. [ 1480.202478] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Creating folder: Instances. Parent ref: group-v401295. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.205713] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27904559-d114-4d00-9dc5-a3f77c02df73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.209165] env[62405]: DEBUG nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1480.217306] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946700, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.218939] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Created folder: Instances in parent group-v401295. [ 1480.218939] env[62405]: DEBUG oslo.service.loopingcall [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.218939] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6199de01-baca-4461-9572-111eda11adac] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1480.218939] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6efa94a-b883-4d52-9966-b3fe3c5bcdfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.239788] env[62405]: DEBUG nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1480.240020] env[62405]: DEBUG nova.compute.provider_tree [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1480.248885] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.248885] env[62405]: value = "task-1946702" [ 1480.248885] env[62405]: _type = "Task" [ 1480.248885] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.260815] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946702, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.261807] env[62405]: DEBUG nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1480.289251] env[62405]: DEBUG nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1480.351025] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1480.386509] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1480.388351] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.388748] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1480.388748] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.388920] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 
tempest-SecurityGroupsTestJSON-1573923907-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1480.392017] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1480.392017] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1480.392017] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1480.392017] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1480.392017] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1480.392324] env[62405]: DEBUG nova.virt.hardware [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1480.392324] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf817a0-4795-4aa8-889c-f4870dcd798e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.405142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ed642b-be07-4311-83d2-9864cd6417d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.515428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6291430a-9bb6-47e8-b5ec-ad2299049b2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.526263] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b47257f-3b1b-4b76-81c3-f6a45f1c18e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.559959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 
tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.560367] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Instance network_info: |[{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1480.561028] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:d1:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08e9585e-6186-4788-9fd9-24174ce45a6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0974798a-a146-421e-a104-caeb56db51b3', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.569310] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Creating folder: Project (6d1aee7c44f44abc86ed5c15b027e989). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.570323] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b7eb95-b2d8-4ab2-b5e8-5776e58655a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.573442] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-127b8314-af04-473a-aca1-5477ad9fae43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.584368] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67776f5a-6d87-4387-958f-c35d64d6a27f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.591910] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Created folder: Project (6d1aee7c44f44abc86ed5c15b027e989) in parent group-v401284. [ 1480.592139] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Creating folder: Instances. Parent ref: group-v401298. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1480.593101] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-212b73c3-326f-4ed4-b7be-e64ea54199a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.606445] env[62405]: DEBUG nova.compute.provider_tree [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1480.618975] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Created folder: Instances in parent group-v401298. [ 1480.619354] env[62405]: DEBUG oslo.service.loopingcall [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.619622] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1480.619868] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cecbf6e6-34b3-4593-ad9f-28f8d89e7e98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.642313] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.642313] env[62405]: value = "task-1946705" [ 1480.642313] env[62405]: _type = "Task" [ 1480.642313] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.652395] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946705, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.715412] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946700, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.772270] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946702, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.775604] env[62405]: DEBUG nova.compute.manager [None req-aee168e5-86b0-48d9-95c1-aee4f8325f13 tempest-ServerDiagnosticsV248Test-516254139 tempest-ServerDiagnosticsV248Test-516254139-project-admin] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1480.777551] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986fafba-1949-4496-8399-05418e3af1e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.787494] env[62405]: INFO nova.compute.manager [None req-aee168e5-86b0-48d9-95c1-aee4f8325f13 tempest-ServerDiagnosticsV248Test-516254139 tempest-ServerDiagnosticsV248Test-516254139-project-admin] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Retrieving diagnostics [ 1480.787908] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f67a71-f65f-46ab-8aa9-1119fa4ef958 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.889810] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Successfully created port: 19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1481.159978] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946705, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.163299] env[62405]: DEBUG nova.scheduler.client.report [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 13 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1481.163566] env[62405]: DEBUG nova.compute.provider_tree [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 13 to 14 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1481.163780] env[62405]: DEBUG nova.compute.provider_tree [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1481.219475] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946700, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634317} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.220128] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c0b964f-c900-4704-ae12-7eba7952f678/3c0b964f-c900-4704-ae12-7eba7952f678.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.220128] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.220273] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c2c46f5-f500-4d0a-9b16-a7945535c6dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.228251] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1481.228251] env[62405]: value = "task-1946706" [ 1481.228251] env[62405]: _type = "Task" [ 1481.228251] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.242502] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.262153] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946702, 'name': CreateVM_Task, 'duration_secs': 0.621109} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.262388] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6199de01-baca-4461-9572-111eda11adac] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1481.263225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.266023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.266023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1481.266023] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d017ba6a-03ec-470b-8c9b-46ff18e95a4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.273242] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1481.273242] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232f4ae-b7a9-a794-f9c6-7f354ff7e191" [ 1481.273242] env[62405]: _type = "Task" [ 1481.273242] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.285544] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232f4ae-b7a9-a794-f9c6-7f354ff7e191, 'name': SearchDatastore_Task, 'duration_secs': 0.01165} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.285859] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.286136] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1481.286329] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.286469] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.286642] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.286897] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83a914a5-fc54-41cd-84aa-067da3e495a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.297250] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.297880] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1481.298346] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3df6629f-28b7-4ab0-954a-59717e6f7646 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.305952] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1481.305952] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52011a1c-4d19-2935-7bac-200275b2b067" [ 1481.305952] env[62405]: _type = "Task" [ 1481.305952] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.315898] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52011a1c-4d19-2935-7bac-200275b2b067, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.653146] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946705, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.678735] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.355s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.678735] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1481.681039] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.553s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.683823] env[62405]: INFO nova.compute.claims [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1481.729432] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Received event network-changed-ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1481.729432] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Refreshing instance network info cache due to event network-changed-ff5be597-1e44-4215-81eb-9129935b393c. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1481.730347] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Acquiring lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.731059] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Acquired lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.731480] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Refreshing network info cache for port ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1481.751234] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095569} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.753373] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1481.754738] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159f8f2b-24fc-4c13-9dd8-c9e5d706220d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.788250] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 3c0b964f-c900-4704-ae12-7eba7952f678/3c0b964f-c900-4704-ae12-7eba7952f678.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1481.789035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff5ea17e-3096-4cea-b7aa-b7f01a3eb89f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.830989] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52011a1c-4d19-2935-7bac-200275b2b067, 'name': SearchDatastore_Task, 'duration_secs': 0.011587} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.832962] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1481.832962] env[62405]: value = "task-1946708" [ 1481.832962] env[62405]: _type = "Task" [ 1481.832962] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.833168] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ff8d7c-1a86-4e4c-b233-14f2050ef980 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.852072] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.852464] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1481.852464] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dc2d63-0cde-215a-52ff-51be6fc7e027" [ 1481.852464] env[62405]: _type = "Task" [ 1481.852464] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.860986] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dc2d63-0cde-215a-52ff-51be6fc7e027, 'name': SearchDatastore_Task, 'duration_secs': 0.009389} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.861518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.861807] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6199de01-baca-4461-9572-111eda11adac/6199de01-baca-4461-9572-111eda11adac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.865015] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9c13f13-e8d8-47c3-9302-eb74f02d57b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.869065] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1481.869065] env[62405]: value = "task-1946709" [ 1481.869065] env[62405]: _type = "Task" [ 1481.869065] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.877234] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.155598] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946705, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.193032] env[62405]: DEBUG nova.compute.utils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1482.195759] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1482.195759] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1482.350357] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.380903] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.382366] env[62405]: DEBUG nova.policy [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bea5fa632f74543a680f69edf3c05ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3cd6b7f1ce346e98fe8bff2423f34ab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1482.561872] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Successfully updated port: 15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1482.666404] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946705, 'name': CreateVM_Task, 'duration_secs': 1.741484} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.666776] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1482.671112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.671112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.671112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1482.671112] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7e5093-2749-4b86-ad45-e44602ad7e51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.681095] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1482.681095] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dc31e0-eaa5-a3e9-1083-b8d9e9c931bd" [ 1482.681095] env[62405]: _type = "Task" [ 1482.681095] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.700550] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dc31e0-eaa5-a3e9-1083-b8d9e9c931bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.705617] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1482.854265] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946708, 'name': ReconfigVM_Task} progress is 99%. 
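Aside (not part of the log): here the image-cache path "[datastore1] devstack-image-cache_base/<image-id>" is guarded by an oslo.concurrency lock so only one request at a time fetches or copies the cached VMDK. A minimal sketch of the same pattern, assuming the lockutils.lock context manager; the lock name and fetch_fn are illustrative:

    from oslo_concurrency import lockutils

    def get_cached_image(datastore, image_id, fetch_fn):
        # One lock per datastore/image pair, matching the names seen in the log.
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            # Inside the lock it is safe to check-and-fetch without racing
            # other requests that want the same cached VMDK.
            return fetch_fn(datastore, image_id)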
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.886865] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "7db1b086-942e-4890-8750-0d717e522786" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.888042] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.897507] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.913531} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.897507] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6199de01-baca-4461-9572-111eda11adac/6199de01-baca-4461-9572-111eda11adac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1482.897507] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1482.897507] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d6b4e80-ea37-4e65-a23e-1c129304650c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.909322] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1482.909322] env[62405]: value = "task-1946710" [ 1482.909322] env[62405]: _type = "Task" [ 1482.909322] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.921574] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946710, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.990806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4723c663-cef7-4714-ac50-4a797836d2ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.004155] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb0c05c-72fc-49e3-ad8b-cfa8f0ae6e75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.052154] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84de9952-f4be-4359-9445-45857ad75d1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.060290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4af140d-ae6c-4b52-9292-ef66356a3682 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.066500] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.066655] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.067029] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.078062] env[62405]: DEBUG nova.compute.provider_tree [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.192916] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dc31e0-eaa5-a3e9-1083-b8d9e9c931bd, 'name': SearchDatastore_Task, 'duration_secs': 0.048703} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.193259] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.193507] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1483.193821] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.194040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.194122] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1483.194344] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41ebd24f-fb40-4447-8a9b-6c9ec717c95d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.210064] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1483.210189] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Folder [datastore1] devstack-image-cache_base created. 
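Aside (not part of the log): ds_util issues FileManager.MakeDirectory for devstack-image-cache_base and treats an already-existing folder as success ("_create_folder_if_missing"), so concurrent builds can share one cache directory. A comparable idempotent mkdir, sketched with the standard library rather than the vSphere FileManager API:

    import os

    def ensure_cache_dir(path):
        """Create the image-cache directory if missing; a pre-existing
        directory is not an error (mirrors _create_folder_if_missing)."""
        os.makedirs(path, exist_ok=True)
        return path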
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1483.216168] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a10fe89-9aea-466d-84e5-343b3d1cec4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.223879] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1483.223879] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52208167-e923-41f1-bef1-2a368e05d11b" [ 1483.223879] env[62405]: _type = "Task" [ 1483.223879] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.234394] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52208167-e923-41f1-bef1-2a368e05d11b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.258018] env[62405]: DEBUG nova.compute.manager [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Received event network-changed-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1483.258018] env[62405]: DEBUG nova.compute.manager [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Refreshing instance network info cache due to event network-changed-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1483.258018] env[62405]: DEBUG oslo_concurrency.lockutils [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] Acquiring lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.258018] env[62405]: DEBUG oslo_concurrency.lockutils [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] Acquired lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.258616] env[62405]: DEBUG nova.network.neutron [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Refreshing network info cache for port 9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.302033] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Updated VIF entry in instance network info cache for port ff5be597-1e44-4215-81eb-9129935b393c. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1483.302682] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Updating instance_info_cache with network_info: [{"id": "ff5be597-1e44-4215-81eb-9129935b393c", "address": "fa:16:3e:17:77:42", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.35", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff5be597-1e", "ovs_interfaceid": "ff5be597-1e44-4215-81eb-9129935b393c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.351021] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946708, 'name': ReconfigVM_Task, 'duration_secs': 1.07678} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.351342] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 3c0b964f-c900-4704-ae12-7eba7952f678/3c0b964f-c900-4704-ae12-7eba7952f678.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1483.351988] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1db75c5-4457-43ad-9544-39bdb64d1da6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.361187] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1483.361187] env[62405]: value = "task-1946711" [ 1483.361187] env[62405]: _type = "Task" [ 1483.361187] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.370918] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946711, 'name': Rename_Task} progress is 5%. 
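Aside (not part of the log): the instance_info_cache entry above is a list of VIF dicts (id, address, network.subnets[].ips, devname, ovs_interfaceid, ...). A small helper that pulls the fixed IPs and tap device name out of such an entry; the structure is taken from the log, the helper itself is illustrative:

    def summarize_vif(vif):
        """Return (port_id, mac, devname, [fixed ips]) from one network_info entry."""
        ips = [
            ip['address']
            for subnet in vif['network']['subnets']
            for ip in subnet['ips']
            if ip.get('type') == 'fixed'
        ]
        return vif['id'], vif['address'], vif.get('devname'), ips

    # Against the cache entry logged above this would yield roughly:
    # ('ff5be597-...', 'fa:16:3e:17:77:42', 'tapff5be597-1e', ['192.168.233.35'])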
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.389447] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1483.420768] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075646} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.421539] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1483.421907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a841ffa5-77e2-40d9-aab4-85384b3b7083 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.446698] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 6199de01-baca-4461-9572-111eda11adac/6199de01-baca-4461-9572-111eda11adac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1483.447801] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82fa2392-21e7-46b2-960c-bec9d367dc1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.469519] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1483.469519] env[62405]: value = "task-1946713" [ 1483.469519] env[62405]: _type = "Task" [ 1483.469519] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.479638] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946713, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.613633] env[62405]: ERROR nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [req-38ce2098-6b18-4b1c-988e-24faba09d733] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-38ce2098-6b18-4b1c-988e-24faba09d733"}]} [ 1483.634748] env[62405]: DEBUG nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1483.652716] env[62405]: DEBUG nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1483.652956] env[62405]: DEBUG nova.compute.provider_tree [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.669418] env[62405]: DEBUG nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1483.692580] env[62405]: DEBUG nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 
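Aside (not part of the log): the 409 "placement.concurrent_update" above is Placement's optimistic-concurrency check; the inventory PUT carried a stale resource_provider_generation, so the report client re-reads the provider and retries. A minimal sketch of that read-modify-retry loop against the Placement REST API; the session object, auth, and retry count are assumptions:

    def set_inventory(session, rp_uuid, inventories, max_retries=3):
        """PUT inventories with the provider generation; on 409, refresh and retry."""
        url = '/resource_providers/%s/inventories' % rp_uuid
        for _ in range(max_retries):
            current = session.get(url).json()
            payload = {
                'resource_provider_generation': current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = session.put(url, json=payload)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409 placement.concurrent_update: another writer bumped the
            # generation (15 -> 16 later in this log); loop and re-read it.
        raise RuntimeError('could not update inventory for %s' % rp_uuid)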
tempest-ServersTestFqdnHostnames-139085892-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1483.701685] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1483.722951] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1483.735740] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52208167-e923-41f1-bef1-2a368e05d11b, 'name': SearchDatastore_Task, 'duration_secs': 0.013548} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.740832] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8fbe2c2-0e28-4068-a42a-ef9e3336f11f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.747742] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1483.747742] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a7c71-f5a6-e56c-6695-9ae92c04b420" [ 1483.747742] env[62405]: _type = "Task" [ 1483.747742] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.755980] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1483.756252] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.756421] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1483.756686] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.756749] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1483.756888] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1483.757608] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1483.757774] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1483.757957] env[62405]: DEBUG nova.virt.hardware [None 
req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1483.758135] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1483.758312] env[62405]: DEBUG nova.virt.hardware [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1483.759093] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b69f4a1-b9fb-4a18-bb95-fb9e77b61022 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.771595] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a7c71-f5a6-e56c-6695-9ae92c04b420, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.778961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b764b6f9-0ee2-45a3-8bda-26f02e1a8cfe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.805455] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Releasing lock "refresh_cache-3c0b964f-c900-4704-ae12-7eba7952f678" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.805752] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-vif-plugged-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1483.806687] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Acquiring lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.806905] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.807094] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f 
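Aside (not part of the log): the hardware.py lines walk from the flavor/image limits (sockets=65536, cores=65536, threads=65536) to the single possible topology for 1 vCPU, cores=1,sockets=1,threads=1. A toy version of that enumeration, illustrative only:

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Yield every sockets*cores*threads factorization that covers vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield Topology(s, c, t)

    # list(possible_topologies(1, 65536, 65536, 65536)) ==
    #     [Topology(sockets=1, cores=1, threads=1)]
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".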
req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.807270] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] No waiting events found dispatching network-vif-plugged-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1483.807474] env[62405]: WARNING nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received unexpected event network-vif-plugged-0974798a-a146-421e-a104-caeb56db51b3 for instance with vm_state building and task_state spawning. [ 1483.807626] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-changed-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1483.807768] env[62405]: DEBUG nova.compute.manager [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing instance network info cache due to event network-changed-0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1483.807968] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.808115] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.809405] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing network info cache for port 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.878721] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946711, 'name': Rename_Task, 'duration_secs': 0.248497} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.880125] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1483.880383] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4abb57a8-c6ef-4fd2-a901-3546eaf10c76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.884012] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d80ec9-9c54-4834-96fd-175af3d81da7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.888174] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1483.888174] env[62405]: value = "task-1946714" [ 1483.888174] env[62405]: _type = "Task" [ 1483.888174] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.900756] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df793525-b049-4c08-b712-496b10a0a96a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.908465] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.909188] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946714, 'name': PowerOnVM_Task} progress is 0%. 
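Aside (not part of the log): the "Running periodic task ComputeManager._instance_usage_audit" entry comes from oslo.service's periodic task machinery. A minimal sketch of declaring such a task, assuming the oslo_service.periodic_task decorator and PeriodicTasks base-class API; the 60-second spacing and class name are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class MiniManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _instance_usage_audit(self, context):
            # The real task audits per-instance usage; here we just log.
            print('running usage audit')

    # A service loop would then invoke the manager's run_periodic_tasks(context)
    # on a timer, producing DEBUG lines like the one above.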
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.944058] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.945243] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf038e3-fd49-494a-8e36-26ce15a85d75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.954895] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f975d5ec-0be0-4f95-b2af-91c2ef3ad8a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.973118] env[62405]: DEBUG nova.compute.provider_tree [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.986326] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946713, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.070167] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Successfully created port: f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.190584] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Successfully updated port: 19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1484.267541] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a7c71-f5a6-e56c-6695-9ae92c04b420, 'name': SearchDatastore_Task, 'duration_secs': 0.024381} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.268180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.268180] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 262424b0-dc7d-4b6c-9539-2d6cd23a93da/262424b0-dc7d-4b6c-9539-2d6cd23a93da.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1484.268180] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6754c05-7074-410f-ac35-e67388a41015 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.278882] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1484.278882] env[62405]: value = "task-1946715" [ 1484.278882] env[62405]: _type = "Task" [ 1484.278882] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.288708] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.404726] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946714, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.484312] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946713, 'name': ReconfigVM_Task, 'duration_secs': 0.516757} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.484577] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 6199de01-baca-4461-9572-111eda11adac/6199de01-baca-4461-9572-111eda11adac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.485355] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4ddc8fe-58e4-4563-a497-d740548ab567 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.493125] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1484.493125] env[62405]: value = "task-1946716" [ 1484.493125] env[62405]: _type = "Task" [ 1484.493125] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.502113] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946716, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.504414] env[62405]: DEBUG nova.network.neutron [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Updating instance_info_cache with network_info: [{"id": "15d03dd1-4edd-413d-a67d-3c877a40692a", "address": "fa:16:3e:f3:04:d2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15d03dd1-4e", "ovs_interfaceid": "15d03dd1-4edd-413d-a67d-3c877a40692a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.524888] env[62405]: DEBUG nova.scheduler.client.report [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 15 in Placement from 
set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1484.525198] env[62405]: DEBUG nova.compute.provider_tree [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 15 to 16 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1484.525392] env[62405]: DEBUG nova.compute.provider_tree [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1484.695308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.695308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.695308] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1484.794971] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946715, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.905939] env[62405]: DEBUG oslo_vmware.api [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946714, 'name': PowerOnVM_Task, 'duration_secs': 0.561205} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.909619] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1484.913114] env[62405]: INFO nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Took 14.12 seconds to spawn the instance on the hypervisor. [ 1484.913114] env[62405]: DEBUG nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1484.913114] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b638d834-c7ac-4b54-b98f-b6937df46d81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.007199] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.007516] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Instance network_info: |[{"id": "15d03dd1-4edd-413d-a67d-3c877a40692a", "address": "fa:16:3e:f3:04:d2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15d03dd1-4e", "ovs_interfaceid": "15d03dd1-4edd-413d-a67d-3c877a40692a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1485.008144] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946716, 'name': Rename_Task, 'duration_secs': 0.470476} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.008687] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:04:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15d03dd1-4edd-413d-a67d-3c877a40692a', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.018367] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating folder: Project (41626e27199f4370a2554bb243a72d41). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.018689] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.018952] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-617feb6a-f858-4dec-8de0-5c1c316bed48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.022077] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cc71c22-f3db-48e5-8192-3a0b777b833a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.027240] env[62405]: DEBUG nova.network.neutron [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Updated VIF entry in instance network info cache for port 9cfd8f38-b13f-4aae-b836-8df8b8a50eb7. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.027497] env[62405]: DEBUG nova.network.neutron [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Updating instance_info_cache with network_info: [{"id": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "address": "fa:16:3e:69:2e:ac", "network": {"id": "e27eb8f6-2757-46d5-aaee-55a231b6762e", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1167248497-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21cc9ecf7d5e4a5c80b8febb406cd6d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2e8b74b-aa27-4f31-9414-7bcf531e8642", "external-id": "nsx-vlan-transportzone-544", "segmentation_id": 544, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cfd8f38-b1", "ovs_interfaceid": "9cfd8f38-b13f-4aae-b836-8df8b8a50eb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.033539] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.352s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.033539] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1485.037251] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.297s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.039036] env[62405]: INFO nova.compute.claims [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1485.042341] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1485.042341] env[62405]: value = "task-1946718" [ 1485.042341] env[62405]: _type = "Task" [ 1485.042341] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.043829] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created folder: Project (41626e27199f4370a2554bb243a72d41) in parent group-v401284. [ 1485.044041] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating folder: Instances. Parent ref: group-v401301. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1485.049119] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3faffc7-0db9-40f8-8f0f-dc7f977496b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.058219] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.062978] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created folder: Instances in parent group-v401301. [ 1485.063343] env[62405]: DEBUG oslo.service.loopingcall [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.063438] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1485.063761] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e730650-37f3-4372-bd21-6b68ed109535 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.088272] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.088272] env[62405]: value = "task-1946720" [ 1485.088272] env[62405]: _type = "Task" [ 1485.088272] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.098635] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946720, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.239140] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updated VIF entry in instance network info cache for port 0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.239547] env[62405]: DEBUG nova.network.neutron [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.292295] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946715, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.293528] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1485.433093] env[62405]: INFO nova.compute.manager [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Took 20.39 seconds to build instance. [ 1485.532935] env[62405]: DEBUG oslo_concurrency.lockutils [req-d91c3274-6b56-46b0-8d88-a6bbe1cd7b68 req-2316660e-0167-4dc7-a3dc-a82cb39b6c24 service nova] Releasing lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.546778] env[62405]: DEBUG nova.compute.utils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1485.551937] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1485.551937] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1485.567881] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946718, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.605429] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946720, 'name': CreateVM_Task, 'duration_secs': 0.488531} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.605429] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1485.605681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.605827] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.606180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1485.606470] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5274198-1690-457f-908b-30b6177ad813 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.613573] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1485.613573] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521155a9-87ba-5757-a243-650b982210b4" [ 1485.613573] env[62405]: _type = "Task" [ 1485.613573] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.623341] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521155a9-87ba-5757-a243-650b982210b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.743097] env[62405]: DEBUG oslo_concurrency.lockutils [req-202ccaba-7878-49b4-b7ef-82179d43a62f req-f1ba64b6-866c-4f46-b88e-954a9dd718d9 service nova] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.793046] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946715, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.811965] env[62405]: DEBUG nova.policy [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '394d3b5aab6b4d60a7ab94d6bb9c1273', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37e71f387ca845b99564479baf7a9012', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1485.935402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb110df3-f75b-4c1b-849a-4b5b62149ab8 tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.902s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.028282] env[62405]: DEBUG nova.network.neutron [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updating instance_info_cache with network_info: [{"id": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "address": "fa:16:3e:62:6a:7a", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19538d37-e3", "ovs_interfaceid": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.052883] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1486.074907] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946718, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.132599] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521155a9-87ba-5757-a243-650b982210b4, 'name': SearchDatastore_Task, 'duration_secs': 0.013603} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.132599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.132599] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1486.132599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.132872] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.132872] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1486.133369] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e689f101-13cb-480d-90e2-44d0a7345ec4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.148231] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1486.148231] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1486.148448] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40437f3f-ff6b-4b5f-9e69-bb001d5801d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.166255] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1486.166255] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0f912-b931-d107-44b7-ff6a7d805aaa" [ 1486.166255] env[62405]: _type = "Task" [ 1486.166255] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.195954] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0f912-b931-d107-44b7-ff6a7d805aaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.294919] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946715, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.670519} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.294919] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 262424b0-dc7d-4b6c-9539-2d6cd23a93da/262424b0-dc7d-4b6c-9539-2d6cd23a93da.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.294919] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.295102] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f38d4797-3062-4dd8-b5aa-8f437b27c142 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.303118] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1486.303118] env[62405]: value = "task-1946722" [ 1486.303118] env[62405]: _type = "Task" [ 1486.303118] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.304992] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7192677f-de34-4ed4-bd2f-7974b36e3f82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.321351] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32839a49-5a0b-46dc-9052-c8100a33f7dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.325635] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946722, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.360692] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1af9f8-27d5-49d5-9189-8d6fed49eef2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.368933] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5550d946-0977-451a-8b01-2d80927b6120 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.383224] env[62405]: DEBUG nova.compute.provider_tree [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1486.533938] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.534342] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Instance network_info: |[{"id": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "address": "fa:16:3e:62:6a:7a", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19538d37-e3", "ovs_interfaceid": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1486.534718] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:6a:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19538d37-e369-4f7b-8051-61d2c0a7fb00', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.547062] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Creating folder: Project (8cf1f39c8aef41df8c86777f80980664). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.547411] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2951b6c-81a8-42fe-aa4e-f1db36d127fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.569493] env[62405]: DEBUG oslo_vmware.api [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946718, 'name': PowerOnVM_Task, 'duration_secs': 1.129069} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.569493] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Created folder: Project (8cf1f39c8aef41df8c86777f80980664) in parent group-v401284. [ 1486.569493] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Creating folder: Instances. Parent ref: group-v401304. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1486.569943] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.570251] env[62405]: INFO nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Took 13.55 seconds to spawn the instance on the hypervisor. [ 1486.570320] env[62405]: DEBUG nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1486.570690] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d597b167-ff92-435d-bf06-f1dd3c6fba1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.573332] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a97113-9189-4b25-8b57-c17b7ddbf0a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.587355] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Created folder: Instances in parent group-v401304. [ 1486.587501] env[62405]: DEBUG oslo.service.loopingcall [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.587700] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1486.587903] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31e9ffde-c3fa-4950-8ed2-89a3423edd0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.614654] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.614654] env[62405]: value = "task-1946725" [ 1486.614654] env[62405]: _type = "Task" [ 1486.614654] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.631774] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946725, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.678959] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0f912-b931-d107-44b7-ff6a7d805aaa, 'name': SearchDatastore_Task, 'duration_secs': 0.059732} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.679895] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0135a07-0458-4dfd-aec7-64b74a623927 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.688028] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1486.688028] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aaf78d-90ff-1b45-ac78-6c68d2cb8f76" [ 1486.688028] env[62405]: _type = "Task" [ 1486.688028] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.705134] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aaf78d-90ff-1b45-ac78-6c68d2cb8f76, 'name': SearchDatastore_Task, 'duration_secs': 0.013065} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.705134] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.705339] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8624629d-642a-4adf-984e-3925beeb4fef/8624629d-642a-4adf-984e-3925beeb4fef.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1486.707111] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fef42654-535f-4ced-8303-a3c4e16293e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.716474] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1486.716474] env[62405]: value = "task-1946726" [ 1486.716474] env[62405]: _type = "Task" [ 1486.716474] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.727379] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.816944] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.260687} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.817252] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.818122] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f301ba63-9277-49ba-b010-1de7de0baefb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.844020] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 262424b0-dc7d-4b6c-9539-2d6cd23a93da/262424b0-dc7d-4b6c-9539-2d6cd23a93da.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1486.844768] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2946e288-6e9a-489a-a53c-56af11dab14a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.870844] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1486.870844] env[62405]: value = "task-1946727" [ 1486.870844] env[62405]: _type = "Task" [ 1486.870844] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.882858] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946727, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.938028] env[62405]: DEBUG nova.scheduler.client.report [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 16 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1486.938028] env[62405]: DEBUG nova.compute.provider_tree [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 16 to 17 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1486.938028] env[62405]: DEBUG nova.compute.provider_tree [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1487.036767] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Successfully created port: 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.073060] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1487.095605] env[62405]: INFO nova.compute.manager [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Took 20.84 seconds to build instance. 
[ 1487.110794] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1487.111156] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1487.111540] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1487.111787] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1487.112016] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1487.112339] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1487.112605] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1487.112782] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1487.112958] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c 
tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1487.113224] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1487.113434] env[62405]: DEBUG nova.virt.hardware [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1487.114672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e4a6df-fa43-43de-93d1-40f3fe7708ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.130965] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946725, 'name': CreateVM_Task, 'duration_secs': 0.375374} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.133739] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1487.134756] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.134756] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.135147] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1487.136397] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed61cea-7963-47c7-9fdd-8cc161b7bc3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.141331] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cae9430-12d6-46c6-8498-47419facf6c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.148183] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b 
tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1487.148183] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee4b8-7ce4-db87-9426-ba8d8f51c94e" [ 1487.148183] env[62405]: _type = "Task" [ 1487.148183] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.171655] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee4b8-7ce4-db87-9426-ba8d8f51c94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.228054] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946726, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.383639] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946727, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.403712] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.417920] env[62405]: DEBUG nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Received event network-vif-plugged-15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1487.419031] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Acquiring lock "8624629d-642a-4adf-984e-3925beeb4fef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.419031] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Lock "8624629d-642a-4adf-984e-3925beeb4fef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.419031] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Lock "8624629d-642a-4adf-984e-3925beeb4fef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.419031] env[62405]: DEBUG 
nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] No waiting events found dispatching network-vif-plugged-15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1487.419588] env[62405]: WARNING nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Received unexpected event network-vif-plugged-15d03dd1-4edd-413d-a67d-3c877a40692a for instance with vm_state building and task_state spawning. [ 1487.419588] env[62405]: DEBUG nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Received event network-changed-15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1487.419774] env[62405]: DEBUG nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Refreshing instance network info cache due to event network-changed-15d03dd1-4edd-413d-a67d-3c877a40692a. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1487.419987] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Acquiring lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.420515] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Acquired lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.420515] env[62405]: DEBUG nova.network.neutron [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Refreshing network info cache for port 15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.441818] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.442407] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1487.446121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.502s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.447721] env[62405]: INFO nova.compute.claims [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1487.600161] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d14d02cd-92a7-428c-a217-29e5275121ea tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.351s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.671581] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee4b8-7ce4-db87-9426-ba8d8f51c94e, 'name': SearchDatastore_Task, 'duration_secs': 0.058357} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.671898] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.672372] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.672445] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.672528] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.672781] env[62405]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1487.673069] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-650e5a1d-0b40-4b86-b3d9-ee2c5d726631 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.684471] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1487.684839] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1487.685446] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d30c07c0-f593-4e4a-968a-5cee06908a96 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.694287] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1487.694287] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5256245d-797c-eede-3250-00a904d52651" [ 1487.694287] env[62405]: _type = "Task" [ 1487.694287] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.705617] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5256245d-797c-eede-3250-00a904d52651, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.731286] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558321} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.734823] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8624629d-642a-4adf-984e-3925beeb4fef/8624629d-642a-4adf-984e-3925beeb4fef.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1487.735223] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1487.735642] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a776fda6-041b-44cf-b94a-81f9e0c280c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.743687] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1487.743687] env[62405]: value = "task-1946728" [ 1487.743687] env[62405]: _type = "Task" [ 1487.743687] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.756978] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.887604] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946727, 'name': ReconfigVM_Task, 'duration_secs': 0.663542} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.887931] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 262424b0-dc7d-4b6c-9539-2d6cd23a93da/262424b0-dc7d-4b6c-9539-2d6cd23a93da.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.888699] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cdd5d6d-0f94-411b-a710-7cf9112b77aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.901796] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1487.901796] env[62405]: value = "task-1946729" [ 1487.901796] env[62405]: _type = "Task" [ 1487.901796] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.917293] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946729, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.959219] env[62405]: DEBUG nova.compute.utils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1487.960983] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1487.961072] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1488.055851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.056225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.214315] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5256245d-797c-eede-3250-00a904d52651, 'name': SearchDatastore_Task, 'duration_secs': 0.03149} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.215301] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bb7db3a-be26-40fb-8780-f78425a62fe3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.224450] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1488.224450] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52654ea7-320a-ff1e-2c37-6aea604a9fba" [ 1488.224450] env[62405]: _type = "Task" [ 1488.224450] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.234695] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52654ea7-320a-ff1e-2c37-6aea604a9fba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.254861] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.277753} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.255191] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.256069] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992ae275-2038-4946-8ea3-3578ff51e60f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.282681] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 8624629d-642a-4adf-984e-3925beeb4fef/8624629d-642a-4adf-984e-3925beeb4fef.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.282916] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4b04734-c0d4-4643-8311-8b6dded53722 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.307904] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1488.307904] env[62405]: value = "task-1946730" [ 1488.307904] env[62405]: _type = "Task" [ 1488.307904] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.317807] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946730, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.380133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "8995f9cb-8454-4a98-9090-290f87f8af18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.380612] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.403513] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1488.411504] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946729, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.419563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.420347] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.452837] env[62405]: DEBUG nova.network.neutron [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Updated VIF entry in instance network info cache for port 15d03dd1-4edd-413d-a67d-3c877a40692a. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.453267] env[62405]: DEBUG nova.network.neutron [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Updating instance_info_cache with network_info: [{"id": "15d03dd1-4edd-413d-a67d-3c877a40692a", "address": "fa:16:3e:f3:04:d2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15d03dd1-4e", "ovs_interfaceid": "15d03dd1-4edd-413d-a67d-3c877a40692a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.459824] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.460214] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.467096] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1488.484051] env[62405]: DEBUG nova.policy [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73c8b8bd95654f4f9b9f810f56ff1748', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83958cd7b67e4b9db908c6445af0a129', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1488.563280] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1488.740626] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52654ea7-320a-ff1e-2c37-6aea604a9fba, 'name': SearchDatastore_Task, 'duration_secs': 0.01696} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.742512] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.742750] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0491dc4b-cf35-4035-aca9-baf43b86af7e/0491dc4b-cf35-4035-aca9-baf43b86af7e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1488.743807] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b905db37-88b9-4ba6-9ae1-820042d31cac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.748394] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51e72a1d-67a0-409f-86c9-942674e8c77e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.759665] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50e32e0-292c-45ab-a6c3-0849d42761df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.764920] env[62405]: DEBUG oslo_vmware.api [None 
req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1488.764920] env[62405]: value = "task-1946731" [ 1488.764920] env[62405]: _type = "Task" [ 1488.764920] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.802298] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ea0eb9-193d-4730-94cb-79253b4b77bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.806640] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946731, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.815783] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e54b5c6-4472-43b9-8d73-2b828f32f989 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.825781] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946730, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.836850] env[62405]: DEBUG nova.compute.provider_tree [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.883412] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1488.914445] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946729, 'name': Rename_Task, 'duration_secs': 0.779779} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.914445] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1488.914445] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d599090a-bcfe-4dfa-bb6a-0942ccfb1032 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.924024] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1488.926383] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1488.926383] env[62405]: value = "task-1946732" [ 1488.926383] env[62405]: _type = "Task" [ 1488.926383] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.941699] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946732, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.958065] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Releasing lock "refresh_cache-8624629d-642a-4adf-984e-3925beeb4fef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.959220] env[62405]: DEBUG nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received event network-vif-plugged-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1488.959220] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Acquiring lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.959581] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.959581] env[62405]: DEBUG oslo_concurrency.lockutils [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.959761] env[62405]: DEBUG nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] No waiting events found dispatching network-vif-plugged-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1488.959950] env[62405]: WARNING nova.compute.manager [req-d20315f8-b668-4caf-bc5e-4712038d34b7 req-0f1c3e21-0406-44e3-8a4c-10aebd2155cc service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received unexpected event network-vif-plugged-19538d37-e369-4f7b-8051-61d2c0a7fb00 for instance with vm_state building and task_state spawning. 
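The "Acquiring lock ... / Lock ... acquired / ... released" DEBUG lines around the event dispatch and the devstack-image-cache_base entries come from oslo.concurrency's lockutils helpers, which Nova uses to serialize per-instance event handling and image-cache access. The sketch below is only an illustration of that pattern, assuming oslo.concurrency is installed; the lock names and guarded function bodies are taken from the log for flavour and are not Nova's actual code.

    # Minimal sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock ... / acquired / released" DEBUG lines above.
    # Lock names and function bodies are illustrative only.
    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers sharing the same lock name,
    # similar to the per-instance "<uuid>-events" lock used when popping
    # external events such as network-vif-plugged.
    @lockutils.synchronized("8624629d-642a-4adf-984e-3925beeb4fef-events")
    def pop_event(events, key):
        # Pop a pending external event for the instance, if one is waiting.
        return events.pop(key, None)

    def process_cached_image(image_id):
        # Context-manager form: the same mechanism used around the
        # "[datastore1] devstack-image-cache_base/<image-id>" critical section.
        with lockutils.lock(f"[datastore1] devstack-image-cache_base/{image_id}"):
            # ... search the datastore, then copy or reuse the cached VMDK ...
            return f"processed {image_id}"

    if __name__ == "__main__":
        print(pop_event({"network-vif-plugged-15d03dd1": "event"}, "missing"))
        print(process_cached_image("e6bba7a8-c2de-41dc-871a-3859bba5f4f9"))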
[ 1489.001189] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Successfully updated port: f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1489.094258] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.281942] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946731, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.322825] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946730, 'name': ReconfigVM_Task, 'duration_secs': 0.523758} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.323275] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 8624629d-642a-4adf-984e-3925beeb4fef/8624629d-642a-4adf-984e-3925beeb4fef.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1489.324168] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd960218-3f5e-422f-92ec-55dfe2f82563 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.336079] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1489.336079] env[62405]: value = "task-1946733" [ 1489.336079] env[62405]: _type = "Task" [ 1489.336079] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.340687] env[62405]: DEBUG nova.scheduler.client.report [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.353999] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946733, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.401103] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.417239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.440908] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.461944] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.477385] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1489.506482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.506482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.506482] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1489.515190] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1489.515477] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1489.515588] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1489.515768] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1489.515987] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1489.516208] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1489.516461] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1489.516626] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1489.516841] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1489.517030] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1489.517131] env[62405]: DEBUG nova.virt.hardware [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1489.518036] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124a25b1-5de8-4741-befe-e1716178916a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.529835] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3c79c4-d4c9-4bc6-9e9b-504ffb1680e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.777907] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946731, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.669331} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.778405] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0491dc4b-cf35-4035-aca9-baf43b86af7e/0491dc4b-cf35-4035-aca9-baf43b86af7e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1489.780025] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1489.780025] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11082e12-d5e2-4428-a173-edfbec37762d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.790284] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1489.790284] env[62405]: value = "task-1946734" [ 1489.790284] env[62405]: _type = "Task" [ 1489.790284] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.799380] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946734, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.848459] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.849315] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1489.852605] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946733, 'name': Rename_Task, 'duration_secs': 0.259163} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.853295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.759s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.855101] env[62405]: INFO nova.compute.claims [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1489.858054] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1489.859032] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37713c21-6576-4d72-96ad-fbd99e6cd019 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.871517] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1489.871517] env[62405]: value = "task-1946735" [ 1489.871517] env[62405]: _type = "Task" [ 1489.871517] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.873323] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Successfully updated port: 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1489.882543] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.947168] env[62405]: DEBUG oslo_vmware.api [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946732, 'name': PowerOnVM_Task, 'duration_secs': 1.024339} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.947591] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1489.947682] env[62405]: INFO nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Took 14.68 seconds to spawn the instance on the hypervisor. [ 1489.947839] env[62405]: DEBUG nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1489.948657] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0929182e-6392-4d9f-b301-b721f91b3143 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.106630] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.302998] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946734, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073885} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.304207] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1490.304290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7316168b-d4c6-46af-a736-fbe9f3701c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.331868] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 0491dc4b-cf35-4035-aca9-baf43b86af7e/0491dc4b-cf35-4035-aca9-baf43b86af7e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1490.335914] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a263668-24bc-4d77-932f-d156accafd72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.355030] env[62405]: DEBUG nova.compute.utils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1490.359868] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1490.359868] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1490.365538] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1490.365538] env[62405]: value = "task-1946736" [ 1490.365538] env[62405]: _type = "Task" [ 1490.365538] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.377553] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.377925] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquired lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.378374] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1490.379852] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946736, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.390218] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946735, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.401779] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.479480] env[62405]: INFO nova.compute.manager [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Took 22.03 seconds to build instance. 
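The recurring "Waiting for the task ... progress is N% ... completed successfully" records above (PowerOnVM_Task, ReconfigVM_Task, ExtendVirtualDisk_Task) are produced while the driver polls a vCenter task object until it reaches a terminal state. Below is a minimal sketch of that polling loop, assuming a generic get_task_info callable; the real logic lives in oslo_vmware.api and is driven by an oslo.service looping call rather than a bare sleep.

import time

def wait_for_vcenter_task(get_task_info, task_ref, poll_interval=0.5):
    # get_task_info is a hypothetical stand-in for the PropertyCollector query
    # that retrieves the Task.info property from vCenter for task_ref.
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result            # surfaces as "completed successfully"
        if info.state == "error":
            raise RuntimeError(info.error)
        # "queued" / "running": emit a progress record and poll again,
        # matching the "progress is 0% ... 66% ... 100%" lines above.
        print(f"Task {task_ref}: progress is {info.progress or 0}%")
        time.sleep(poll_interval)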
[ 1490.511426] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Successfully created port: a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1490.519969] env[62405]: DEBUG nova.policy [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb0e6766a5a04f03adefbbeb620c59d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ac6737e7e8649d5a1061806cb927ed6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1490.858591] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1490.880076] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946735, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.889957] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946736, 'name': ReconfigVM_Task, 'duration_secs': 0.463088} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.889957] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 0491dc4b-cf35-4035-aca9-baf43b86af7e/0491dc4b-cf35-4035-aca9-baf43b86af7e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1490.889957] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61c8510c-0530-4ef5-8986-ec97be5e9076 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.900370] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1490.900370] env[62405]: value = "task-1946737" [ 1490.900370] env[62405]: _type = "Task" [ 1490.900370] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.914161] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946737, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.920883] env[62405]: DEBUG nova.network.neutron [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.977892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a23a4d50-b5cf-4ea6-8065-0046c6374ec1 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.541s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.995526] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1491.277322] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d65096c-9259-40ed-99d1-ac76692a8184 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.289292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab76cbe-0f9e-4ddb-83db-245c8bd7c490 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.326802] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33bbe67-988f-4c2e-8588-e16a05e3430c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.337693] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211e18d9-087a-4923-9540-1c448f4dbe7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.352655] env[62405]: DEBUG nova.compute.provider_tree [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.378840] env[62405]: DEBUG oslo_vmware.api [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946735, 'name': PowerOnVM_Task, 'duration_secs': 1.257452} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.379125] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1491.379327] env[62405]: INFO nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Took 13.34 seconds to spawn the instance on the hypervisor. 
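The lock bookkeeping records throughout this excerpt ("Acquiring lock", "acquired ... waited 0.759s", "released ... held") are emitted by oslo.concurrency's lockutils wrappers rather than by the code they protect. The following is a minimal sketch of the same pattern, using a simplified claim function instead of Nova's actual ResourceTracker code.

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, flavor):
    # Code paths such as ResourceTracker.instance_claim serialize on the
    # "compute_resources" lock; the acquire/wait/held timings in the log are
    # logged by lockutils itself around this function.
    return {"instance": instance_uuid,
            "vcpus": flavor["vcpus"],
            "memory_mb": flavor["memory_mb"]}

def refresh_network_cache(instance_uuid, rebuild_cache):
    # The explicit context-manager form is what the paired
    # "refresh_cache-<instance-uuid>" acquire/release records correspond to.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        return rebuild_cache(instance_uuid)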
[ 1491.379502] env[62405]: DEBUG nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1491.380276] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc6bae8-5b41-44d8-903e-784f8f61a305 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.402030] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.402030] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1491.402030] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1491.416181] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946737, 'name': Rename_Task, 'duration_secs': 0.169654} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.417073] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1491.417333] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-831c7049-09cd-4c76-a555-0c263c81cb85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.426267] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1491.426267] env[62405]: value = "task-1946738" [ 1491.426267] env[62405]: _type = "Task" [ 1491.426267] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.431529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.432844] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Instance network_info: |[{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1491.433203] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:e3:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2f99aa3-770a-41cb-bb49-775f9f0f2708', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1491.441679] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Creating folder: Project (e3cd6b7f1ce346e98fe8bff2423f34ab). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.442807] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db2f117d-3268-46af-b8d7-8f433b43ea89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.451205] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946738, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.465706] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Created folder: Project (e3cd6b7f1ce346e98fe8bff2423f34ab) in parent group-v401284. [ 1491.466132] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Creating folder: Instances. Parent ref: group-v401307. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1491.466252] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6605043-e2a2-4103-aa55-e4be5909b124 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.478661] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Created folder: Instances in parent group-v401307. [ 1491.478793] env[62405]: DEBUG oslo.service.loopingcall [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1491.478966] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1491.479215] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9db7f248-606e-487e-9167-8b2e711f18c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.494371] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1491.504356] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1491.504356] env[62405]: value = "task-1946741" [ 1491.504356] env[62405]: _type = "Task" [ 1491.504356] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.508785] env[62405]: DEBUG nova.network.neutron [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updating instance_info_cache with network_info: [{"id": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "address": "fa:16:3e:f7:0a:02", "network": {"id": "672e2f4d-571c-431f-bc4f-101f0e233d70", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-758460415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37e71f387ca845b99564479baf7a9012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0805ecfc-d6", "ovs_interfaceid": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.516120] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946741, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.813855] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Successfully created port: acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1491.856544] env[62405]: DEBUG nova.scheduler.client.report [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1491.870559] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1491.899571] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1491.899571] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.899571] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1491.899886] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.899886] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1491.900113] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1491.900217] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1491.900379] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1491.900622] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1491.900752] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1491.900926] env[62405]: DEBUG nova.virt.hardware [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1491.902052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bdd05c-e4bc-4753-a21e-7e7dd8de9db1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.906325] env[62405]: INFO nova.compute.manager [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Took 18.27 seconds to build instance. [ 1491.916236] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.916465] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.916728] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 058682a1-5240-4414-9203-c612ecd12999] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.916956] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.917247] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.917388] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 7db1b086-942e-4890-8750-0d717e522786] Skipping network cache update for instance because it is Building. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.917569] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1491.923052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933f9a7b-1606-4757-9fba-cc468c48f865 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.949707] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946738, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.959752] env[62405]: DEBUG nova.compute.manager [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Received event network-vif-plugged-f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1491.960274] env[62405]: DEBUG oslo_concurrency.lockutils [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.960274] env[62405]: DEBUG oslo_concurrency.lockutils [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] Lock "058682a1-5240-4414-9203-c612ecd12999-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.960557] env[62405]: DEBUG oslo_concurrency.lockutils [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] Lock "058682a1-5240-4414-9203-c612ecd12999-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.960875] env[62405]: DEBUG nova.compute.manager [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] No waiting events found dispatching network-vif-plugged-f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1491.961096] env[62405]: WARNING nova.compute.manager [req-b5b04695-f9f8-4d20-918f-1e6f0f998812 req-39f46897-6294-4c96-905c-1f592e61089d service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Received unexpected event network-vif-plugged-f2f99aa3-770a-41cb-bb49-775f9f0f2708 for instance with vm_state building and task_state spawning. 
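The "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." / "Received unexpected event ..." sequence above reflects Neutron notifying Nova of a VIF plug through the external-events API: the compute manager matches the event against callbacks the instance registered to wait for, and merely warns when nothing is waiting. A simplified, hypothetical dispatcher illustrating that matching (not Nova's actual InstanceEvents implementation) follows.

class InstanceEvents:
    """Toy registry keyed by (instance_uuid, event_name)."""

    def __init__(self):
        self._waiters = {}

    def register(self, instance_uuid, event_name, callback):
        self._waiters[(instance_uuid, event_name)] = callback

    def pop_instance_event(self, instance_uuid, event_name):
        return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name, vm_state):
    callback = events.pop_instance_event(instance_uuid, event_name)
    if callback is None:
        # Matches "No waiting events found dispatching ..." followed by the
        # "Received unexpected event ... vm_state building" warning above.
        print(f"WARNING: unexpected event {event_name} for {instance_uuid} "
              f"(vm_state={vm_state})")
        return
    callback()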
[ 1491.991285] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.992706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.994041] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1491.995310] env[62405]: DEBUG nova.objects.instance [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lazy-loading 'info_cache' on Instance uuid 2257c786-54f9-441a-832c-cf3178bfcc78 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1492.011715] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Releasing lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.012079] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Instance network_info: |[{"id": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "address": "fa:16:3e:f7:0a:02", "network": {"id": "672e2f4d-571c-431f-bc4f-101f0e233d70", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-758460415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37e71f387ca845b99564479baf7a9012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0805ecfc-d6", "ovs_interfaceid": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1492.016221] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:0a:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1492.024094] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Creating folder: Project (37e71f387ca845b99564479baf7a9012). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1492.024426] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946741, 'name': CreateVM_Task, 'duration_secs': 0.394279} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.025564] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.025899] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f1c89a6-292d-4fc1-bf76-704b5acd5753 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.030129] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.030129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.030129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.030129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1492.030458] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-851439ae-690b-4449-af51-44609c69d668 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.041952] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1492.041952] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52facacf-8637-ce50-601e-074c817e4db5" [ 1492.041952] 
env[62405]: _type = "Task" [ 1492.041952] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.048237] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Created folder: Project (37e71f387ca845b99564479baf7a9012) in parent group-v401284. [ 1492.048237] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Creating folder: Instances. Parent ref: group-v401310. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1492.054397] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8e682d1-3537-4e8c-ab45-a6e05518e393 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.068361] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52facacf-8637-ce50-601e-074c817e4db5, 'name': SearchDatastore_Task, 'duration_secs': 0.022748} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.072299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.073029] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1492.073029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.073029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.074039] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.074746] 
env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Created folder: Instances in parent group-v401310. [ 1492.077490] env[62405]: DEBUG oslo.service.loopingcall [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1492.078620] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32987ae2-958a-4333-b2c5-68fb2f55e787 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.082517] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1492.083639] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3dde842-b722-45cc-98d2-aa4b20f36a8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.126543] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1492.126543] env[62405]: value = "task-1946744" [ 1492.126543] env[62405]: _type = "Task" [ 1492.126543] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.127881] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.128490] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1492.133357] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-737474aa-3876-4e41-8958-9e69f0226578 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.144131] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1492.144131] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e42f36-f8d4-9a22-f50f-3c0bda053ecb" [ 1492.144131] env[62405]: _type = "Task" [ 1492.144131] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.144131] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946744, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.156108] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e42f36-f8d4-9a22-f50f-3c0bda053ecb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.362825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.363261] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1492.366353] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.949s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.367789] env[62405]: INFO nova.compute.claims [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1492.410050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a58e1185-9b1d-408e-860f-cfd3f495bc0b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.785s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.442250] env[62405]: DEBUG oslo_vmware.api [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1946738, 'name': PowerOnVM_Task, 'duration_secs': 0.539225} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.442682] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1492.442940] env[62405]: INFO nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Took 12.09 seconds to spawn the instance on the hypervisor. [ 1492.445796] env[62405]: DEBUG nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1492.445796] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118343a9-f249-4567-9d2a-4c7c249c26c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.643936] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946744, 'name': CreateVM_Task, 'duration_secs': 0.48722} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.644112] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1492.644907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.645101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.645431] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1492.651137] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef14faeb-ebb4-41a3-80be-dd83a1e679ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.661103] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] 
Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e42f36-f8d4-9a22-f50f-3c0bda053ecb, 'name': SearchDatastore_Task, 'duration_secs': 0.015449} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.663569] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1492.663569] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52de5651-c7c2-11bd-ae38-8fe5b280c30c" [ 1492.663569] env[62405]: _type = "Task" [ 1492.663569] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.663820] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d58a5e7-eded-48d7-998c-d69fc2211eb5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.674677] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1492.674677] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520893af-f0ba-5573-986a-588da4bc22e8" [ 1492.674677] env[62405]: _type = "Task" [ 1492.674677] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.679618] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52de5651-c7c2-11bd-ae38-8fe5b280c30c, 'name': SearchDatastore_Task, 'duration_secs': 0.012639} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.683071] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.683364] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1492.683575] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.688126] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.688357] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.694209] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520893af-f0ba-5573-986a-588da4bc22e8, 'name': SearchDatastore_Task, 'duration_secs': 0.012529} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.694333] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.694499] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1492.695492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.695492] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1492.695492] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6da15d03-70fa-42d9-95a4-8c9a457df5cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.697791] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca7b3ed8-bfd7-4b46-a91f-ca610a785abe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.706637] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1492.706637] env[62405]: value = "task-1946745" [ 1492.706637] env[62405]: _type = "Task" [ 1492.706637] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.711234] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1492.711343] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1492.713589] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9faa1c6-11d0-4596-aed5-1eaeb365f374 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.720764] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1492.720764] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f03e4-2630-9ddb-c7eb-d7150d90ff46" [ 1492.720764] env[62405]: _type = "Task" [ 1492.720764] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.724224] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.733826] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f03e4-2630-9ddb-c7eb-d7150d90ff46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.872896] env[62405]: DEBUG nova.compute.utils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1492.874415] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1492.874595] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1492.972652] env[62405]: INFO nova.compute.manager [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Took 17.63 seconds to build instance. [ 1493.054357] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.073469] env[62405]: DEBUG nova.policy [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b769f35b27254d918252e9d9a02b2638', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3c5cc973d264698a415b007cde8bd9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1493.191140] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1493.223704] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946745, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.239891] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f03e4-2630-9ddb-c7eb-d7150d90ff46, 'name': SearchDatastore_Task, 'duration_secs': 0.012314} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.241364] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a284043-1f46-45dc-a4fd-a7628aef8c86 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.250320] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1493.250320] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5221fcc6-3b3d-5fb7-6f35-c619d2f37ba3" [ 1493.250320] env[62405]: _type = "Task" [ 1493.250320] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.261628] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5221fcc6-3b3d-5fb7-6f35-c619d2f37ba3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.379274] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1493.477115] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dd85304c-256d-4dde-be36-b7bd9cd1153b tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.158s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.687315] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781e25ae-91b0-4c04-98da-8d4da4642b5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.700264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192dd8c0-b759-414c-99c3-14804de96643 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.741328] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.745170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f3cc71-2681-4d3f-9ad0-e04bebe71758 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.758211] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715363} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.761894] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1493.761894] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1493.762805] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319d4053-832c-434d-aae4-495f82837ab4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.768116] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9280b2db-de34-420d-91bb-6243ef89e0e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.785184] env[62405]: DEBUG nova.compute.provider_tree [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.792132] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5221fcc6-3b3d-5fb7-6f35-c619d2f37ba3, 'name': SearchDatastore_Task, 'duration_secs': 0.063851} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.793275] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.793631] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b71f962-2b92-4f7b-bb8d-b50da5130018/9b71f962-2b92-4f7b-bb8d-b50da5130018.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1493.798673] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fa971e9-88ee-48f7-b937-3be828691ece {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.802024] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1493.802024] env[62405]: value = "task-1946746" [ 1493.802024] env[62405]: _type = "Task" [ 1493.802024] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.804502] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1493.804502] env[62405]: value = "task-1946747" [ 1493.804502] env[62405]: _type = "Task" [ 1493.804502] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.812051] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946746, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.816307] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946747, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.828276] env[62405]: DEBUG nova.compute.manager [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1493.829015] env[62405]: DEBUG nova.compute.manager [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing instance network info cache due to event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1493.829015] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] Acquiring lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.829172] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] Acquired lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.829390] env[62405]: DEBUG nova.network.neutron [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.999181] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.294737] env[62405]: DEBUG nova.scheduler.client.report [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1494.316402] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076858} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.317558] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1494.318351] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca7b0d2-763c-454f-9257-3c2d02775c23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.331033] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946747, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.331949] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Successfully created port: 77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1494.361657] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1494.362834] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d491760-9635-4c71-903c-fd45b0ab0faa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.389706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.389982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.397206] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1494.399832] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1494.399832] env[62405]: value = "task-1946748" [ 1494.399832] env[62405]: _type = "Task" [ 1494.399832] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.413188] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946748, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.446823] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1494.450671] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1494.450671] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1494.450671] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1494.450671] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1494.450671] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1494.450917] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1494.450917] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1494.451254] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1494.451353] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1494.452130] env[62405]: DEBUG nova.virt.hardware [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1494.452512] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9246913c-bb28-40a7-ac22-1b2887273bd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.468982] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6046805-7d8d-4e4c-8c4b-16b5379d7ed5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.474497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.474497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.504852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock 
"refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.505073] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 1494.505352] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.505596] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1494.505759] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1494.669608] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Successfully updated port: a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1494.801258] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.801258] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1494.804870] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.343s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.809047] env[62405]: INFO nova.compute.claims [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1494.820701] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.864837} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.820996] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b71f962-2b92-4f7b-bb8d-b50da5130018/9b71f962-2b92-4f7b-bb8d-b50da5130018.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1494.821238] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1494.823091] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1883334e-6857-484a-8b10-51474d4db6ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.830088] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1494.830088] env[62405]: value = "task-1946749" [ 1494.830088] env[62405]: _type = "Task" [ 1494.830088] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.842251] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946749, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.896173] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1494.916807] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946748, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.009224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.172892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.172959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquired lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.173165] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1495.242227] env[62405]: DEBUG nova.network.neutron [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updated VIF entry in instance network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1495.242227] env[62405]: DEBUG nova.network.neutron [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updating instance_info_cache with network_info: [{"id": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "address": "fa:16:3e:62:6a:7a", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19538d37-e3", "ovs_interfaceid": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.311278] env[62405]: DEBUG nova.compute.utils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1495.313586] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1495.313586] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1495.339900] env[62405]: DEBUG nova.compute.manager [None req-6cca1873-572c-402f-9fc5-c544783202da tempest-ServerDiagnosticsV248Test-516254139 tempest-ServerDiagnosticsV248Test-516254139-project-admin] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1495.341642] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a244b13b-98e1-441c-83cc-771e74cf2383 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.347642] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210595} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.348275] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1495.349034] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50836df6-1ca0-4c4e-8b48-b9d563305be6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.353461] env[62405]: INFO nova.compute.manager [None req-6cca1873-572c-402f-9fc5-c544783202da tempest-ServerDiagnosticsV248Test-516254139 tempest-ServerDiagnosticsV248Test-516254139-project-admin] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Retrieving diagnostics [ 1495.354637] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6ab4d8-2c5d-4b82-a754-ea4dc288354d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.377387] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 9b71f962-2b92-4f7b-bb8d-b50da5130018/9b71f962-2b92-4f7b-bb8d-b50da5130018.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.378145] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2ce7f50-2f20-40d1-9c01-f5bc48ea4b17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.428459] env[62405]: DEBUG oslo_vmware.api [None 
req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1495.428459] env[62405]: value = "task-1946750" [ 1495.428459] env[62405]: _type = "Task" [ 1495.428459] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.431715] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946748, 'name': ReconfigVM_Task, 'duration_secs': 0.89106} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.435121] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1495.435738] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6fa690f-ba4e-4a89-aa56-f7a58079c0be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.441116] env[62405]: DEBUG nova.policy [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4dc443f6c0f045fb91313cff57634354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e05b3582b75842c5908781d74ee041aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1495.448301] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946750, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.448301] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1495.448301] env[62405]: value = "task-1946751" [ 1495.448301] env[62405]: _type = "Task" [ 1495.448301] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.448301] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.458232] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946751, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.541552] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Successfully updated port: acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.747089] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1495.750599] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd50360f-ace3-4d32-b7b0-883e0a1acf31 req-72598cc5-6749-4d32-8ade-29e373458f7f service nova] Releasing lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.816133] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1495.886065] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Successfully created port: 4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.949597] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946750, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.961416] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946751, 'name': Rename_Task, 'duration_secs': 0.271823} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.961635] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1495.961968] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c39aae80-6719-43c8-9ccf-f7b05b546ec5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.970588] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1495.970588] env[62405]: value = "task-1946752" [ 1495.970588] env[62405]: _type = "Task" [ 1495.970588] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.984177] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946752, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.046194] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.046551] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquired lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.046802] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.118450] env[62405]: DEBUG nova.network.neutron [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Updating instance_info_cache with network_info: [{"id": "a6c201e5-eb87-434f-9c74-9f99937836fd", "address": "fa:16:3e:86:dc:ab", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, 
"tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6c201e5-eb", "ovs_interfaceid": "a6c201e5-eb87-434f-9c74-9f99937836fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.140690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4052e10f-48ba-42b2-b87e-d954b28f8d1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.152167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb75a7a-be81-4408-a29c-beb27ce4f07c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.193355] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bb7cd1-1bf9-475a-a1a1-fa5eb14add1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.203069] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00895687-335e-4754-b13f-802deab99361 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.218739] env[62405]: DEBUG nova.compute.provider_tree [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.449032] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946750, 'name': ReconfigVM_Task, 'duration_secs': 0.855756} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.449511] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 9b71f962-2b92-4f7b-bb8d-b50da5130018/9b71f962-2b92-4f7b-bb8d-b50da5130018.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1496.450028] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad698b7b-a52f-48cc-acf0-34e01965f3d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.458031] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1496.458031] env[62405]: value = "task-1946753" [ 1496.458031] env[62405]: _type = "Task" [ 1496.458031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.467830] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946753, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.483783] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946752, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.624265] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Releasing lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.624702] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Instance network_info: |[{"id": "a6c201e5-eb87-434f-9c74-9f99937836fd", "address": "fa:16:3e:86:dc:ab", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6c201e5-eb", "ovs_interfaceid": "a6c201e5-eb87-434f-9c74-9f99937836fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1496.627017] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:dc:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6c201e5-eb87-434f-9c74-9f99937836fd', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1496.635014] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Creating folder: Project (83958cd7b67e4b9db908c6445af0a129). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1496.638411] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1496.639745] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d1af3b9-272c-4c9e-b1c8-34833dfc5565 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.658155] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Created folder: Project (83958cd7b67e4b9db908c6445af0a129) in parent group-v401284. [ 1496.660124] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Creating folder: Instances. Parent ref: group-v401313. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1496.660124] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6742aec7-fb6f-4e2e-bfe3-3a0f5a083b1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.673188] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Created folder: Instances in parent group-v401313. [ 1496.673188] env[62405]: DEBUG oslo.service.loopingcall [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.673188] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1496.673188] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-218bdc56-8076-47b7-a4d9-89d717b82249 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.702287] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1496.702287] env[62405]: value = "task-1946756" [ 1496.702287] env[62405]: _type = "Task" [ 1496.702287] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.723049] env[62405]: DEBUG nova.scheduler.client.report [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1496.726239] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946756, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.831522] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1496.864909] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1496.865344] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.865344] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1496.865590] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.865644] env[62405]: DEBUG 
nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1496.865788] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1496.866012] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1496.870016] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1496.870261] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1496.870458] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1496.870678] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1496.871655] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db2a8b7-7c27-4446-bbe3-2e2e750279fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.882358] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ee84e-205f-4a2d-9edb-8dc10146d706 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.946138] env[62405]: DEBUG nova.network.neutron [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updating instance_info_cache with network_info: [{"id": "acb33455-b824-40fd-99bd-4628778412a0", "address": "fa:16:3e:c6:b0:18", "network": {"id": "24a4e5e0-178e-4713-b3b3-db2044169596", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1947982707-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4ac6737e7e8649d5a1061806cb927ed6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacb33455-b8", "ovs_interfaceid": "acb33455-b824-40fd-99bd-4628778412a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.975343] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946753, 'name': Rename_Task, 'duration_secs': 0.215958} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.975343] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1496.975343] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8feb8856-b866-4626-ad6f-2303fa04962f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.988692] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946752, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.993379] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1496.993379] env[62405]: value = "task-1946757" [ 1496.993379] env[62405]: _type = "Task" [ 1496.993379] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.002531] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946757, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.217675] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946756, 'name': CreateVM_Task, 'duration_secs': 0.431368} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.217835] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1497.218546] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.218709] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.220955] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1497.221292] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccbfd304-d832-4241-81e7-e681a911641d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.228208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.228712] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1497.231407] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1497.231407] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52770d09-5c64-3e2e-9b73-dde124c38e78" [ 1497.231407] env[62405]: _type = "Task" [ 1497.231407] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.231767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.206s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.234214] env[62405]: INFO nova.compute.claims [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.250885] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52770d09-5c64-3e2e-9b73-dde124c38e78, 'name': SearchDatastore_Task, 'duration_secs': 0.012993} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.250885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.251201] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1497.251598] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.251598] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.251598] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1497.251831] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-dccd027d-7ac3-4282-94d8-4d5e68127379 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.267518] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1497.267721] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1497.268494] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e62477f-89f0-49f0-9d23-cb9ac637a247 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.277559] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1497.277559] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4ef8c-fe27-0120-dc55-f1953aa44a16" [ 1497.277559] env[62405]: _type = "Task" [ 1497.277559] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.288611] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4ef8c-fe27-0120-dc55-f1953aa44a16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.449401] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Releasing lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.449924] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Instance network_info: |[{"id": "acb33455-b824-40fd-99bd-4628778412a0", "address": "fa:16:3e:c6:b0:18", "network": {"id": "24a4e5e0-178e-4713-b3b3-db2044169596", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1947982707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4ac6737e7e8649d5a1061806cb927ed6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacb33455-b8", "ovs_interfaceid": "acb33455-b824-40fd-99bd-4628778412a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1497.450349] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:b0:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acb33455-b824-40fd-99bd-4628778412a0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1497.457940] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Creating folder: Project (4ac6737e7e8649d5a1061806cb927ed6). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1497.458310] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a0ffa12-125c-4f49-8081-cccd77fb17a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.469986] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Created folder: Project (4ac6737e7e8649d5a1061806cb927ed6) in parent group-v401284. [ 1497.472247] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Creating folder: Instances. Parent ref: group-v401316. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1497.472247] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e9143d2-b6f5-4e9c-8fc9-c487ba841dcd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.482350] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Created folder: Instances in parent group-v401316. [ 1497.482633] env[62405]: DEBUG oslo.service.loopingcall [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1497.485875] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1497.486189] env[62405]: DEBUG oslo_vmware.api [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946752, 'name': PowerOnVM_Task, 'duration_secs': 1.096683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.486688] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f33b222-094b-4729-9ddd-1a89053cd269 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.500548] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1497.500904] env[62405]: INFO nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Took 13.78 seconds to spawn the instance on the hypervisor. 
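The Rename_Task / PowerOnVM_Task entries above, with their "progress is N%" updates and final "completed successfully ... duration_secs" record, are produced by the wait_for_task/_poll_task loop in oslo_vmware/api.py that keeps re-reading a vSphere task's info until it succeeds or fails. The snippet below is only an illustrative sketch of that poll-until-done pattern, not the oslo.vmware implementation; the get_task_info callable and POLL_INTERVAL value are assumptions introduced for the example.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; assumed value, not taken from the log

    def wait_for_task(get_task_info, task_id):
        """Poll a vSphere task until it finishes, mirroring the
        'progress is N%' / 'completed successfully' entries above."""
        while True:
            info = get_task_info(task_id)       # assumed helper returning state/progress/error
            if info.state == 'success':
                return info.result              # e.g. the new VM ref for a CreateVM_Task
            if info.state == 'error':
                raise RuntimeError(info.error)  # surface the vCenter fault to the caller
            # still queued or running: report progress and try again
            print(f"Task {task_id} progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)

In the log, the same loop is what turns a single PowerOnVM_Task invocation into the sequence of 0% / 66% / 100% poll entries before the completion record.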
[ 1497.500980] env[62405]: DEBUG nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1497.501870] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af859ca-20da-4a6f-b7bc-ea5455998403 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.519058] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946757, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.521314] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1497.521314] env[62405]: value = "task-1946760" [ 1497.521314] env[62405]: _type = "Task" [ 1497.521314] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.530354] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946760, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.713482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.716171] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.745607] env[62405]: DEBUG nova.compute.utils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1497.747344] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1497.748760] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1497.801893] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4ef8c-fe27-0120-dc55-f1953aa44a16, 'name': SearchDatastore_Task, 'duration_secs': 0.010738} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.802467] env[62405]: DEBUG nova.compute.manager [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Received event network-changed-f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1497.802587] env[62405]: DEBUG nova.compute.manager [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Refreshing instance network info cache due to event network-changed-f2f99aa3-770a-41cb-bb49-775f9f0f2708. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1497.802838] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Acquiring lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.802975] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Acquired lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.805585] env[62405]: DEBUG nova.network.neutron [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Refreshing network info cache for port f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1497.812479] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cbe4602-e232-408b-b334-250a7642e227 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.826438] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1497.826438] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fcfc6f-385a-9e24-4f92-49a7d06a17b1" [ 1497.826438] env[62405]: _type = "Task" [ 1497.826438] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.831265] env[62405]: DEBUG nova.policy [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4dc443f6c0f045fb91313cff57634354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e05b3582b75842c5908781d74ee041aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1497.838280] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fcfc6f-385a-9e24-4f92-49a7d06a17b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.013711] env[62405]: DEBUG oslo_vmware.api [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1946757, 'name': PowerOnVM_Task, 'duration_secs': 0.9604} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.013711] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1498.013711] env[62405]: INFO nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Took 10.94 seconds to spawn the instance on the hypervisor. [ 1498.013711] env[62405]: DEBUG nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1498.014525] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910ff56b-b4b3-4cd4-9973-9264d4e001d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.032850] env[62405]: INFO nova.compute.manager [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Took 21.88 seconds to build instance. [ 1498.047011] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946760, 'name': CreateVM_Task, 'duration_secs': 0.412894} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.047011] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1498.047011] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.047011] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.047011] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1498.047285] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f26dc56b-12f7-4343-843a-0b11b0a595f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.054593] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1498.054593] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd4536-70d5-1a18-e0e3-81bf31ad5044" [ 1498.054593] env[62405]: _type = "Task" [ 1498.054593] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.065777] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd4536-70d5-1a18-e0e3-81bf31ad5044, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.132423] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.132731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.254406] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1498.345927] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fcfc6f-385a-9e24-4f92-49a7d06a17b1, 'name': SearchDatastore_Task, 'duration_secs': 0.056986} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.349046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.349046] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 801e7086-5742-4a04-962c-7546284aa12d/801e7086-5742-4a04-962c-7546284aa12d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1498.349444] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a169003e-58ae-4570-97ef-aa4ed7187e52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.358421] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1498.358421] env[62405]: value = "task-1946761" [ 1498.358421] env[62405]: _type = "Task" [ 1498.358421] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.370029] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946761, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.456180] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Successfully created port: d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1498.534485] env[62405]: DEBUG oslo_concurrency.lockutils [None req-80154eb3-7ca5-48c5-ab7a-b486f003bfea tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.396s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.580538] env[62405]: INFO nova.compute.manager [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Took 21.47 seconds to build instance. 
[ 1498.589107] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd4536-70d5-1a18-e0e3-81bf31ad5044, 'name': SearchDatastore_Task, 'duration_secs': 0.01597} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.592170] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.592170] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1498.592170] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.592170] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.592427] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1498.599379] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25f13d4c-9600-4dd8-b17d-b1d0dc4a65b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.602770] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "fbedaa93-5968-4b42-b93e-201d2b44b32b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.604447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock 
"fbedaa93-5968-4b42-b93e-201d2b44b32b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.623950] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1498.624194] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1498.627558] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ab4bf7-4832-43d7-a5d7-38b7083e60fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.635830] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1498.635830] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52110b5e-6179-7bb6-337b-5fbb36897be2" [ 1498.635830] env[62405]: _type = "Task" [ 1498.635830] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.654314] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52110b5e-6179-7bb6-337b-5fbb36897be2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.701614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "8624629d-642a-4adf-984e-3925beeb4fef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.701863] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.702059] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "8624629d-642a-4adf-984e-3925beeb4fef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.702148] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.702309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.704743] env[62405]: INFO nova.compute.manager [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Terminating instance [ 1498.792511] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac83d86-582a-403b-8c05-8e9d60e8a4b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.804021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af4a3d5-afba-47d6-bc75-8b3c181d3670 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.848828] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077748f6-bb39-4a0f-befa-b52cd3e7cc2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.863274] env[62405]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2f11eb-8f74-4eff-9d28-687e428e6d71 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.875938] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946761, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.886310] env[62405]: DEBUG nova.compute.provider_tree [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.964045] env[62405]: DEBUG nova.network.neutron [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updated VIF entry in instance network info cache for port f2f99aa3-770a-41cb-bb49-775f9f0f2708. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1498.964462] env[62405]: DEBUG nova.network.neutron [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.057581] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1499.091250] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0f356b31-0d5f-4e69-b19e-2f98ba91958c tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.986s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.141213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.141449] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.153084] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52110b5e-6179-7bb6-337b-5fbb36897be2, 'name': SearchDatastore_Task, 'duration_secs': 0.054134} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.154637] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12b1ab57-602a-4b17-a700-4a615f82d648 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.162166] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1499.162166] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222612b-7058-d3c1-9736-47a7a73c3289" [ 1499.162166] env[62405]: _type = "Task" [ 1499.162166] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.173201] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222612b-7058-d3c1-9736-47a7a73c3289, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.209623] env[62405]: DEBUG nova.compute.manager [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1499.209910] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.210768] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745cad3e-d1a8-456e-b12f-7181fc3a91dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.220142] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1499.220724] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e6bb3eb-7dba-4a64-aa41-d2c1f64e9c68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.228545] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1499.228545] env[62405]: value = "task-1946762" [ 1499.228545] env[62405]: _type = "Task" [ 1499.228545] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.238391] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.270833] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1499.294107] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1499.294464] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1499.294543] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1499.294712] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1499.294848] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1499.294995] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1499.295225] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1499.295383] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1499.295548] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1499.295723] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1499.295887] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1499.296844] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e02d5bb-12bb-4dfc-8f91-cd4bc1411431 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.306806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94905bc6-4160-4706-b6e1-03ddc147ff21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.369980] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946761, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528186} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.370500] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 801e7086-5742-4a04-962c-7546284aa12d/801e7086-5742-4a04-962c-7546284aa12d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1499.371783] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1499.371783] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e8582dc-bc7c-4bc9-9427-c01c664a391d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.381239] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1499.381239] env[62405]: value = "task-1946763" [ 1499.381239] env[62405]: _type = "Task" [ 1499.381239] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.391668] env[62405]: DEBUG nova.scheduler.client.report [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.395079] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946763, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.468264] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Releasing lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.468510] env[62405]: DEBUG nova.compute.manager [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Received event network-vif-plugged-a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1499.468510] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Acquiring lock "801e7086-5742-4a04-962c-7546284aa12d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.468707] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Lock "801e7086-5742-4a04-962c-7546284aa12d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.468849] env[62405]: DEBUG oslo_concurrency.lockutils [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] Lock "801e7086-5742-4a04-962c-7546284aa12d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.469074] env[62405]: DEBUG nova.compute.manager [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] No waiting events found dispatching network-vif-plugged-a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1499.469259] env[62405]: WARNING nova.compute.manager [req-02cbbab8-b234-4299-a1ba-891e8c39c234 req-989af179-52b2-4777-b722-bf6aa5452ebc service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Received unexpected event network-vif-plugged-a6c201e5-eb87-434f-9c74-9f99937836fd for instance with vm_state building and task_state spawning. [ 1499.590908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.596562] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1499.682096] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222612b-7058-d3c1-9736-47a7a73c3289, 'name': SearchDatastore_Task, 'duration_secs': 0.038431} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.682401] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.682661] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7db1b086-942e-4890-8750-0d717e522786/7db1b086-942e-4890-8750-0d717e522786.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1499.682970] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9126c850-448c-4607-aa4d-d4ebe563703e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.699259] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1499.699259] env[62405]: value = "task-1946764" [ 1499.699259] env[62405]: _type = "Task" [ 1499.699259] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.744964] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946762, 'name': PowerOffVM_Task, 'duration_secs': 0.229368} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.744964] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1499.744964] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.744964] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6dd03c8-3e7a-430a-84ed-e3ec6aa9c28a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.835605] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.835850] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.836052] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] 8624629d-642a-4adf-984e-3925beeb4fef {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.836323] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-782a6949-2c2f-46e1-bd3a-6cf4cf8012de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.844750] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1499.844750] env[62405]: value = "task-1946766" [ 1499.844750] env[62405]: _type = "Task" [ 1499.844750] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.853945] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946766, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.895619] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090029} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.895619] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1499.897028] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fee7b26-15fc-487e-baae-d9abfd780a54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.902837] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.903404] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1499.906183] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.165s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.907604] env[62405]: INFO nova.compute.claims [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1499.933500] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 801e7086-5742-4a04-962c-7546284aa12d/801e7086-5742-4a04-962c-7546284aa12d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.933787] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8013a7dc-8b18-4452-a485-2e505fc1d452 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.959177] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1499.959177] env[62405]: value = "task-1946767" [ 1499.959177] env[62405]: _type = "Task" [ 1499.959177] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.970658] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946767, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.137381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.148371] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "f8c6f99f-499f-4886-aae9-5f08969175f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.149349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.212232] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946764, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.357436] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.403840] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Successfully updated port: 77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1500.413496] env[62405]: DEBUG nova.compute.utils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1500.417321] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1500.417518] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1500.449219] env[62405]: DEBUG nova.compute.manager [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Received event network-vif-plugged-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1500.450371] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Acquiring lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.450856] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.451254] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.451617] env[62405]: DEBUG nova.compute.manager [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] No waiting events found dispatching network-vif-plugged-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1500.452167] env[62405]: WARNING nova.compute.manager [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Received unexpected event network-vif-plugged-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 for instance with vm_state active and task_state None. [ 1500.452529] env[62405]: DEBUG nova.compute.manager [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Received event network-changed-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1500.452914] env[62405]: DEBUG nova.compute.manager [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Refreshing instance network info cache due to event network-changed-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1500.453273] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Acquiring lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.453547] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Acquired lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.453931] env[62405]: DEBUG nova.network.neutron [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Refreshing network info cache for port 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1500.474801] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.476825] env[62405]: DEBUG nova.policy [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4dc443f6c0f045fb91313cff57634354', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e05b3582b75842c5908781d74ee041aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1500.711057] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651748} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.711486] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7db1b086-942e-4890-8750-0d717e522786/7db1b086-942e-4890-8750-0d717e522786.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1500.712242] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1500.712506] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4a72f0d-d779-4ed1-8fde-7dec3e6ccb6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.721727] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1500.721727] env[62405]: value = "task-1946771" [ 1500.721727] env[62405]: _type = "Task" [ 1500.721727] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.733394] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946771, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.801289] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Successfully created port: 2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.859730] env[62405]: DEBUG oslo_vmware.api [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528655} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.860154] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.860391] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1500.860570] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1500.860910] env[62405]: INFO nova.compute.manager [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1500.861236] env[62405]: DEBUG oslo.service.loopingcall [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.861524] env[62405]: DEBUG nova.compute.manager [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1500.861628] env[62405]: DEBUG nova.network.neutron [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1500.907299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.907440] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquired lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.907604] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1500.918295] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1500.977310] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946767, 'name': ReconfigVM_Task, 'duration_secs': 0.761327} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.977840] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 801e7086-5742-4a04-962c-7546284aa12d/801e7086-5742-4a04-962c-7546284aa12d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1500.978497] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7948323-d308-4e28-8cfd-b5434dba03fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.987453] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1500.987453] env[62405]: value = "task-1946772" [ 1500.987453] env[62405]: _type = "Task" [ 1500.987453] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.001884] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946772, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.045142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "2257c786-54f9-441a-832c-cf3178bfcc78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.045142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.045142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "2257c786-54f9-441a-832c-cf3178bfcc78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.045142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.045539] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.047723] env[62405]: INFO nova.compute.manager [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Terminating instance [ 1501.086367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.086616] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.234714] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946771, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.336196} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.235025] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1501.236042] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc484e94-4344-44d3-979e-d6f33dfccf53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.262789] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 7db1b086-942e-4890-8750-0d717e522786/7db1b086-942e-4890-8750-0d717e522786.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1501.263334] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97bbeaab-6ce1-4c67-8635-8c4bf5fbacc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.289956] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1501.289956] env[62405]: value = "task-1946773" [ 1501.289956] env[62405]: _type = "Task" [ 1501.289956] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.301519] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946773, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.313452] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13746ce1-6e43-4b3d-afa8-25b9a4b3f61e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.324678] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a532807-b9bd-4f54-a9f6-80244ed0741c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.356491] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e3a989-cd49-4494-b795-b4278c1560ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.364960] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26d7304-acf3-4170-be31-36ff1030ad97 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.379493] env[62405]: DEBUG nova.compute.provider_tree [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.501663] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946772, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.546673] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1501.555962] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.555962] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquired lock "refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.555962] env[62405]: DEBUG nova.network.neutron [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1501.583105] env[62405]: DEBUG nova.network.neutron [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updated VIF entry in instance network info cache for port 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1501.583459] env[62405]: DEBUG nova.network.neutron [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updating instance_info_cache with network_info: [{"id": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "address": "fa:16:3e:f7:0a:02", "network": {"id": "672e2f4d-571c-431f-bc4f-101f0e233d70", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-758460415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37e71f387ca845b99564479baf7a9012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0805ecfc-d6", "ovs_interfaceid": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.644840] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Successfully updated port: 4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
1501.807176] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946773, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.883156] env[62405]: DEBUG nova.scheduler.client.report [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1501.933582] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1501.960038] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1501.960038] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.960038] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1501.960408] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1501.960408] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1501.960408] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1501.960408] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1501.960408] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1501.960979] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1501.961225] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1501.961427] env[62405]: DEBUG nova.virt.hardware [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1501.962603] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fd0ca9-1dbc-465f-9d44-691a77249534 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.972638] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf066a7f-7bd4-456f-9bc2-49aca2cebac8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.999575] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946772, 'name': Rename_Task, 'duration_secs': 0.564132} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.000054] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1502.000241] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b25720dc-bba2-4afb-9991-5197faf2df15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.007810] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1502.007810] env[62405]: value = "task-1946774" [ 1502.007810] env[62405]: _type = "Task" [ 1502.007810] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.008619] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Successfully updated port: d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1502.019501] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946774, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.039923] env[62405]: DEBUG nova.network.neutron [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Updating instance_info_cache with network_info: [{"id": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "address": "fa:16:3e:07:19:b6", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f01bbe-48", "ovs_interfaceid": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.081587] env[62405]: DEBUG nova.network.neutron [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1502.088523] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d87691-4220-4d7d-b619-32de4739f551 req-35156eb8-5884-426a-9287-09b19f3d09fc service nova] Releasing lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.146386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.146548] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.146733] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1502.207390] env[62405]: DEBUG nova.network.neutron [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.305769] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946773, 'name': ReconfigVM_Task, 'duration_secs': 0.893404} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.305769] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 7db1b086-942e-4890-8750-0d717e522786/7db1b086-942e-4890-8750-0d717e522786.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1502.306150] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb5bd6c4-a686-4d60-935c-d56df0a775bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.316827] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1502.316827] env[62405]: value = "task-1946775" [ 1502.316827] env[62405]: _type = "Task" [ 1502.316827] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.326081] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946775, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.355429] env[62405]: DEBUG nova.network.neutron [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.389293] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.389985] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1502.392983] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.384s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.393257] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.393523] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1502.393785] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.946s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.401393] env[62405]: INFO nova.compute.claims [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1502.404814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8832958-33bf-4c44-97c3-cc2e79b7755b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.416252] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd95a6f-4fc3-434c-81a2-dc7f9b37a0fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.433691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559fc471-e241-47e1-a0b2-1fbfb3b75374 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.440513] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Received event network-changed-a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1502.440792] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Refreshing instance network info cache due to event network-changed-a6c201e5-eb87-434f-9c74-9f99937836fd. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1502.441012] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Acquiring lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.441177] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Acquired lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.441393] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Refreshing network info cache for port a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.446693] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e75c17-0174-491b-9443-cae02d67a210 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.489976] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181320MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1502.490189] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.516946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.517258] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.517375] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1502.529143] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946774, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.543112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Releasing lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.543583] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Instance network_info: |[{"id": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "address": "fa:16:3e:07:19:b6", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f01bbe-48", "ovs_interfaceid": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1502.544314] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:19:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77f01bbe-48b5-4ad3-b215-90ff9d429d0b', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1502.555562] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Creating folder: Project (e3c5cc973d264698a415b007cde8bd9f). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.556527] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92626720-32ee-441b-a4b3-274e21f11c4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.569650] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Created folder: Project (e3c5cc973d264698a415b007cde8bd9f) in parent group-v401284. [ 1502.569886] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Creating folder: Instances. Parent ref: group-v401322. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.570156] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ed79041-724a-430d-aebd-3b03f600256a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.582041] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Created folder: Instances in parent group-v401322. [ 1502.582351] env[62405]: DEBUG oslo.service.loopingcall [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.582518] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1502.582774] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29554d7a-045f-4ba2-b4c4-349b123c4662 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.612938] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1502.612938] env[62405]: value = "task-1946778" [ 1502.612938] env[62405]: _type = "Task" [ 1502.612938] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.627523] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.711026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Releasing lock "refresh_cache-2257c786-54f9-441a-832c-cf3178bfcc78" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.711468] env[62405]: DEBUG nova.compute.manager [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1502.711668] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1502.713474] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa756943-a99a-4d8f-8b7b-283dd1c12691 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.721359] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1502.721662] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51865b15-d8e7-40fd-b44a-445c7feb35a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.731592] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1502.731592] env[62405]: value = "task-1946779" [ 1502.731592] env[62405]: _type = "Task" [ 1502.731592] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.744611] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.754713] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1502.834980] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946775, 'name': Rename_Task, 'duration_secs': 0.23748} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.835210] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1502.835452] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a33c504-9ad8-400e-9049-76298340eb7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.843767] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1502.843767] env[62405]: value = "task-1946780" [ 1502.843767] env[62405]: _type = "Task" [ 1502.843767] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.853822] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.859668] env[62405]: INFO nova.compute.manager [-] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Took 2.00 seconds to deallocate network for instance. [ 1502.880900] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Successfully updated port: 2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1502.905258] env[62405]: DEBUG nova.compute.utils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1502.907474] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1502.907664] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1502.980822] env[62405]: DEBUG nova.policy [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41b79ac0838e4c0198236033d43199db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5178d8cc12f46f3a8599384d4be9b6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1503.026654] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946774, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.033443] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.033723] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.125485] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946778, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.166346] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1503.244308] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946779, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.247844] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Updating instance_info_cache with network_info: [{"id": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "address": "fa:16:3e:f8:6b:65", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4582fcd2-47", "ovs_interfaceid": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.358170] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.369704] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Successfully created port: 531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1503.374018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.384474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.384681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.385477] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1503.408605] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1503.525950] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946774, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.633024] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946778, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.742250] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946779, 'name': PowerOffVM_Task, 'duration_secs': 0.947769} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.742579] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.742778] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.743059] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55b0f47f-9742-409b-8538-e9ac22e1a6ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.755895] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.756386] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Instance network_info: |[{"id": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "address": "fa:16:3e:f8:6b:65", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4582fcd2-47", "ovs_interfaceid": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1503.756668] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:6b:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4582fcd2-4721-4ad7-9452-5b808488dcb2', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1503.769969] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Creating folder: Project (e05b3582b75842c5908781d74ee041aa). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1503.774146] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61e5201d-1e81-42a5-8860-22c2bbfcedcb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.778145] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.778463] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.778579] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleting the datastore file [datastore1] 2257c786-54f9-441a-832c-cf3178bfcc78 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.779647] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7bde47e-9a1d-4eca-975b-170c15be5da0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.787523] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for the task: (returnval){ [ 1503.787523] env[62405]: value = "task-1946784" [ 1503.787523] env[62405]: _type = "Task" [ 1503.787523] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.796044] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Created folder: Project (e05b3582b75842c5908781d74ee041aa) in parent group-v401284. [ 1503.796322] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Creating folder: Instances. Parent ref: group-v401325. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1503.797492] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dddaf8d7-eb32-4e0b-a93f-66ff1d8fe0bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.804924] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.818713] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Created folder: Instances in parent group-v401325. [ 1503.818999] env[62405]: DEBUG oslo.service.loopingcall [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.819209] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1503.819426] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b777078-ab9e-4474-b2bd-8894de761d4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.852530] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Updating instance_info_cache with network_info: [{"id": "d385dca6-fc58-4113-bd50-3886fbe12d53", "address": "fa:16:3e:3b:50:9d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd385dca6-fc", "ovs_interfaceid": "d385dca6-fc58-4113-bd50-3886fbe12d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.853594] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1503.853594] env[62405]: value = "task-1946786" [ 1503.853594] 
env[62405]: _type = "Task" [ 1503.853594] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.872198] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.878052] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946786, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.923414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.923760] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.937153] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851a0dd7-03a5-421e-bacc-ea44f78b518d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.947761] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc8ce60-7980-40a3-816a-6cf849b535f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.951466] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Updated VIF entry in instance network info cache for port a6c201e5-eb87-434f-9c74-9f99937836fd. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.951912] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Updating instance_info_cache with network_info: [{"id": "a6c201e5-eb87-434f-9c74-9f99937836fd", "address": "fa:16:3e:86:dc:ab", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6c201e5-eb", "ovs_interfaceid": "a6c201e5-eb87-434f-9c74-9f99937836fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.989926] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f697ac-a7da-435d-a60c-8d0467104f32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.993445] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.004606] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7c51a2-50c9-4cde-9d58-36cfddf0978c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.022591] env[62405]: DEBUG nova.compute.provider_tree [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.041595] env[62405]: DEBUG oslo_vmware.api [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946774, 'name': PowerOnVM_Task, 'duration_secs': 1.667003} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.042444] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1504.042444] env[62405]: INFO nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Took 14.56 seconds to spawn the instance on the hypervisor. [ 1504.042444] env[62405]: DEBUG nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1504.043313] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58f4e9d-7734-4c5c-9546-103559eb71d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.127841] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946778, 'name': CreateVM_Task, 'duration_secs': 1.411002} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.128047] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1504.129568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.129568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.129568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1504.129568] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e71265c-2ec4-4818-b05b-a381c3cd4b23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.134821] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 
tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1504.134821] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ca60b-e652-65ea-0d7d-22576eb3844b" [ 1504.134821] env[62405]: _type = "Task" [ 1504.134821] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.142968] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ca60b-e652-65ea-0d7d-22576eb3844b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.298769] env[62405]: DEBUG oslo_vmware.api [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Task: {'id': task-1946784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.506932} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.299050] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.299598] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.299598] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.299598] env[62405]: INFO nova.compute.manager [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Took 1.59 seconds to destroy the instance on the hypervisor. [ 1504.299811] env[62405]: DEBUG oslo.service.loopingcall [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.300217] env[62405]: DEBUG nova.compute.manager [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1504.300217] env[62405]: DEBUG nova.network.neutron [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1504.355078] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.355444] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Instance network_info: |[{"id": "d385dca6-fc58-4113-bd50-3886fbe12d53", "address": "fa:16:3e:3b:50:9d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd385dca6-fc", "ovs_interfaceid": "d385dca6-fc58-4113-bd50-3886fbe12d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1504.355728] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.356133] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:50:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd385dca6-fc58-4113-bd50-3886fbe12d53', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1504.363548] env[62405]: DEBUG oslo.service.loopingcall [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.366897] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1504.367135] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e812c5fc-ff0a-4892-92c7-529211422e43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.386948] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946786, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.388168] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1504.388168] env[62405]: value = "task-1946787" [ 1504.388168] env[62405]: _type = "Task" [ 1504.388168] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.396434] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946787, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.408468] env[62405]: DEBUG nova.network.neutron [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.431198] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1504.454841] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Releasing lock "refresh_cache-801e7086-5742-4a04-962c-7546284aa12d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.455678] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Received event network-vif-plugged-acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1504.455678] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Acquiring lock "7db1b086-942e-4890-8750-0d717e522786-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.455678] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Lock "7db1b086-942e-4890-8750-0d717e522786-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.455678] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Lock "7db1b086-942e-4890-8750-0d717e522786-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.455879] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] No waiting events found dispatching network-vif-plugged-acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1504.455972] env[62405]: WARNING nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Received unexpected event network-vif-plugged-acb33455-b824-40fd-99bd-4628778412a0 for instance with vm_state building and task_state spawning. [ 1504.456140] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Received event network-changed-acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1504.456350] env[62405]: DEBUG nova.compute.manager [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Refreshing instance network info cache due to event network-changed-acb33455-b824-40fd-99bd-4628778412a0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1504.456468] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Acquiring lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.457658] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Acquired lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.457658] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Refreshing network info cache for port acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1504.472984] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1504.472984] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1504.472984] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1504.473603] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1504.473603] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1504.473603] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 
tempest-ListImageFiltersTestJSON-38693891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1504.473603] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1504.473603] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1504.473809] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1504.473809] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1504.473809] env[62405]: DEBUG nova.virt.hardware [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1504.475836] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0dbba1-67f6-4cf0-847d-55efa15da616 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.489038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2f9e78-ea09-44d6-bf4a-6d6657b6a6fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.530302] env[62405]: DEBUG nova.scheduler.client.report [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1504.564148] env[62405]: INFO nova.compute.manager [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 
801e7086-5742-4a04-962c-7546284aa12d] Took 25.86 seconds to build instance. [ 1504.651497] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ca60b-e652-65ea-0d7d-22576eb3844b, 'name': SearchDatastore_Task, 'duration_secs': 0.044953} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.651815] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.652054] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1504.652304] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.652448] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.652627] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1504.652916] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e749e2d-db8d-4824-92f1-b7e5b3c03880 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.663918] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1504.664068] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1504.664833] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72e58e33-ded5-42ff-a583-39e5106174d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.672288] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1504.672288] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5244afa0-839a-4cd0-b3bb-efc871ab5ff9" [ 1504.672288] env[62405]: _type = "Task" [ 1504.672288] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.682287] env[62405]: DEBUG nova.network.neutron [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Updating instance_info_cache with network_info: [{"id": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "address": "fa:16:3e:8e:9d:0d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2df3353e-cc", "ovs_interfaceid": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.683690] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5244afa0-839a-4cd0-b3bb-efc871ab5ff9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.857939] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.868095] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946786, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.899764] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946787, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.911332] env[62405]: DEBUG nova.network.neutron [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.011233] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "3c0b964f-c900-4704-ae12-7eba7952f678" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.013360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.013360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.013360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.013360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.016948] env[62405]: INFO nova.compute.manager [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Terminating instance [ 1505.038673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.039227] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1505.045954] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.455s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.050028] env[62405]: INFO nova.compute.claims [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1505.065416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7908c849-0d5f-4ca0-9cbb-ba4b871ed0c5 tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.366s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.188325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.188704] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Instance network_info: |[{"id": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "address": "fa:16:3e:8e:9d:0d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2df3353e-cc", "ovs_interfaceid": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1505.192629] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:9d:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2df3353e-cc22-401d-ba57-099a6e08d7e7', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1505.201956] env[62405]: DEBUG oslo.service.loopingcall [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1505.206623] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1505.207969] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5244afa0-839a-4cd0-b3bb-efc871ab5ff9, 'name': SearchDatastore_Task, 'duration_secs': 0.017016} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.207969] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c75d4e0f-dddf-4b3b-8e24-efe326ae692f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.225517] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae4ec299-208d-46af-a1ef-f92dc90a6c31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.233234] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1505.233234] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0dd5a-18cd-37e5-65f9-27e65c0fe552" [ 1505.233234] env[62405]: _type = "Task" [ 1505.233234] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.240142] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1505.240142] env[62405]: value = "task-1946788" [ 1505.240142] env[62405]: _type = "Task" [ 1505.240142] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.254460] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0dd5a-18cd-37e5-65f9-27e65c0fe552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.258586] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946788, 'name': CreateVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.358338] env[62405]: DEBUG oslo_vmware.api [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946780, 'name': PowerOnVM_Task, 'duration_secs': 2.311653} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.362883] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1505.363812] env[62405]: INFO nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Took 13.49 seconds to spawn the instance on the hypervisor. [ 1505.364248] env[62405]: DEBUG nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1505.365331] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da10bc8-7573-43d6-b410-c6b2b524149c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.376809] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946786, 'name': CreateVM_Task, 'duration_secs': 1.499032} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.380290] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1505.386386] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Successfully updated port: 531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1505.388257] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.394408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.394408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1505.394408] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-033a8ecf-bb73-4856-92c0-cdbdeb9436bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.423251] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1505.423251] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d4c273-5100-bf15-25db-dcee25e945f3" [ 1505.423251] env[62405]: _type = "Task" [ 1505.423251] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.423251] env[62405]: INFO nova.compute.manager [-] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Took 1.12 seconds to deallocate network for instance. [ 1505.423251] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946787, 'name': CreateVM_Task, 'duration_secs': 0.766738} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.426040] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1505.433831] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.441717] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d4c273-5100-bf15-25db-dcee25e945f3, 'name': SearchDatastore_Task, 'duration_secs': 0.016322} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.442097] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.442198] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1505.442474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.442686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.443689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1505.443689] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc567fe5-1fb0-466a-9195-dc470d050a9f {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.450375] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1505.450375] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bb5080-b21a-2f8a-1a5d-061f77ce954b" [ 1505.450375] env[62405]: _type = "Task" [ 1505.450375] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.460147] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bb5080-b21a-2f8a-1a5d-061f77ce954b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.527431] env[62405]: DEBUG nova.compute.manager [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1505.527431] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1505.527431] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31fc405-6d03-4cac-855d-cdf22bfa4394 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.537277] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1505.537558] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa730f05-6bb2-48a2-a7df-52bf1c6d1725 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.549044] env[62405]: DEBUG nova.compute.utils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.553019] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1505.553019] env[62405]: value = "task-1946790" [ 1505.553019] env[62405]: _type = "Task" [ 1505.553019] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.553019] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1505.553019] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.570258] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1505.573473] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946790, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.617966] env[62405]: DEBUG nova.policy [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0a66e5df6764ea19a0bfd6e0e833faf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f4c6a0483674c7286fb3edcb24f70d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1505.629458] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updated VIF entry in instance network info cache for port acb33455-b824-40fd-99bd-4628778412a0. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1505.629458] env[62405]: DEBUG nova.network.neutron [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updating instance_info_cache with network_info: [{"id": "acb33455-b824-40fd-99bd-4628778412a0", "address": "fa:16:3e:c6:b0:18", "network": {"id": "24a4e5e0-178e-4713-b3b3-db2044169596", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1947982707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ac6737e7e8649d5a1061806cb927ed6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacb33455-b8", "ovs_interfaceid": "acb33455-b824-40fd-99bd-4628778412a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.752865] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0dd5a-18cd-37e5-65f9-27e65c0fe552, 'name': SearchDatastore_Task, 'duration_secs': 0.025626} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.753705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.754305] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 02abae6c-8962-49eb-8fa9-36b13a20eff1/02abae6c-8962-49eb-8fa9-36b13a20eff1.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1505.754641] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.755030] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1505.755121] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89494de5-2ca6-4902-adbf-572368b8792d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.761971] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eb20246-0fa9-4a33-bae0-6839aa80f9a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.763436] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946788, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.770575] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1505.770575] env[62405]: value = "task-1946791" [ 1505.770575] env[62405]: _type = "Task" [ 1505.770575] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.771953] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1505.772308] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1505.778738] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-186d64be-af4f-4d67-bd54-2f199084e578 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.786842] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1505.786842] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bc3ca-95c7-e921-ca9c-7e346de41861" [ 1505.786842] env[62405]: _type = "Task" [ 1505.786842] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.792827] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.807400] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bc3ca-95c7-e921-ca9c-7e346de41861, 'name': SearchDatastore_Task, 'duration_secs': 0.011971} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.808472] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03a2e580-bc89-4702-b139-cc5c23a7fca5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.818071] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1505.818071] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4c5fa-c45e-9d08-6085-40aef1cd6a54" [ 1505.818071] env[62405]: _type = "Task" [ 1505.818071] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.827989] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4c5fa-c45e-9d08-6085-40aef1cd6a54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.894711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.898030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.898030] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1505.898030] env[62405]: INFO nova.compute.manager [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Took 22.00 seconds to build instance. 
[ 1505.914770] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Received event network-vif-plugged-77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1505.914824] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquiring lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.915481] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.915810] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.916068] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] No waiting events found dispatching network-vif-plugged-77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1505.916401] env[62405]: WARNING nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Received unexpected event network-vif-plugged-77f01bbe-48b5-4ad3-b215-90ff9d429d0b for instance with vm_state building and task_state spawning. [ 1505.916875] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Received event network-changed-77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1505.917262] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Refreshing instance network info cache due to event network-changed-77f01bbe-48b5-4ad3-b215-90ff9d429d0b. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1505.918535] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquiring lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.918535] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquired lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.918535] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Refreshing network info cache for port 77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1505.938859] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.968068] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bb5080-b21a-2f8a-1a5d-061f77ce954b, 'name': SearchDatastore_Task, 'duration_secs': 0.010084} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.968068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.968068] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1505.968330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.059084] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1506.084327] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946790, 'name': PowerOffVM_Task, 'duration_secs': 0.2086} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.084327] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1506.084741] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1506.085953] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-052f0cc0-0624-45d3-b44e-7d722fefccb7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.111533] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.132571] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a99ba2e-57c4-447f-91c5-d7055dce3f33 req-dc00ef84-4753-46e6-9dad-aa02da106f82 service nova] Releasing lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.152509] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Successfully created port: ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1506.241984] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1506.243289] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1506.245032] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Deleting the datastore file [datastore1] 3c0b964f-c900-4704-ae12-7eba7952f678 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.245032] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dc14812-6458-42df-a3a2-dae6dcecdbed {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.272319] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946788, 'name': CreateVM_Task, 'duration_secs': 0.745169} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.272319] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1506.272603] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for the task: (returnval){ [ 1506.272603] env[62405]: value = "task-1946793" [ 1506.272603] env[62405]: _type = "Task" [ 1506.272603] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.273712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.273915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.274401] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1506.274809] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a463fa5-83f1-47a7-bbe6-640478269b3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.291841] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.291841] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd059e-0eb6-61c0-d7eb-d78b19644795" [ 1506.291841] env[62405]: _type = "Task" [ 1506.291841] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.299418] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946791, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.299725] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.313821] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd059e-0eb6-61c0-d7eb-d78b19644795, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.336533] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4c5fa-c45e-9d08-6085-40aef1cd6a54, 'name': SearchDatastore_Task, 'duration_secs': 0.011856} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.338813] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.338813] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8995f9cb-8454-4a98-9090-290f87f8af18/8995f9cb-8454-4a98-9090-290f87f8af18.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.338813] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.338813] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.339183] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-432795de-27b1-43be-adc7-a0f4e93e0858 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.345595] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-e9c0e686-b3c1-4657-8b0b-26a3b666ab6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.351834] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.351834] env[62405]: value = "task-1946794" [ 1506.351834] env[62405]: _type = "Task" [ 1506.351834] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.357187] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.358100] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.365310] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0bb4e9e-438f-4532-ad42-757d574e35bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.378274] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.378274] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a82eb-41fa-b14a-e999-6546a1527eed" [ 1506.378274] env[62405]: _type = "Task" [ 1506.378274] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.378582] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.392435] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a82eb-41fa-b14a-e999-6546a1527eed, 'name': SearchDatastore_Task, 'duration_secs': 0.013979} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.393384] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11a2e75f-641e-4a77-a254-251ee0b6206e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.402477] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c7e041bc-d071-4e24-ac05-9202f5099bab tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.515s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.403203] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.403203] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8e248-1586-4c71-6214-0db2a5d131ca" [ 1506.403203] env[62405]: _type = "Task" [ 1506.403203] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.419191] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8e248-1586-4c71-6214-0db2a5d131ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.487998] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.488395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.499438] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1506.565074] env[62405]: INFO nova.virt.block_device [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Booting with volume 09314eff-d5f0-4a4a-a4b2-f7844bc0cf35 at /dev/sda [ 1506.656553] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40b39075-ee40-4143-a405-9a7572ecee56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.671945] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39740058-bf5e-4738-a2b7-39e352697086 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.684333] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec62a6ae-4fa3-4d44-a055-e6d77baa0b7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.704510] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d00904d-944f-4e5b-be25-221d63bacca0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.725773] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d724b50e-bbcc-4b6a-9760-373a7357c738 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.762352] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a438e-f072-4a43-92c4-abc7936f2d5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.767885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b428ed-ac38-4214-9ce9-172ac3d8f838 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.792470] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7991ab-29e9-4784-add8-dacf42a23758 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.802567] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946791, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540133} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.818694] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 02abae6c-8962-49eb-8fa9-36b13a20eff1/02abae6c-8962-49eb-8fa9-36b13a20eff1.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1506.818694] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1506.829865] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4010ea82-a05b-4a35-884b-37c93fe21557 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.833248] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38210e3-15fe-48c7-869a-c95320af4dc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.835648] env[62405]: DEBUG nova.compute.provider_tree [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1506.837311] env[62405]: DEBUG oslo_vmware.api [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Task: {'id': task-1946793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20451} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.838379] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.838574] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1506.838750] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1506.839785] env[62405]: INFO nova.compute.manager [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1506.839785] env[62405]: DEBUG oslo.service.loopingcall [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1506.840208] env[62405]: DEBUG nova.compute.manager [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1506.840208] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1506.850129] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd059e-0eb6-61c0-d7eb-d78b19644795, 'name': SearchDatastore_Task, 'duration_secs': 0.024333} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.853496] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ebb3dd-fb6b-47e5-b24e-cb4b922926dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.856288] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.856414] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1506.856594] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.856929] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1506.856929] env[62405]: value = "task-1946795" [ 1506.856929] env[62405]: _type = "Task" [ 1506.856929] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.874911] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946795, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.882114] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946794, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.882744] env[62405]: DEBUG nova.virt.block_device [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating existing volume attachment record: e85ff6fb-ee85-441e-8290-104159212db4 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1506.907322] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1506.907561] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing instance network info cache due to event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1506.908213] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquiring lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.908369] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquired lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.908667] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1506.910779] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1506.924863] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8e248-1586-4c71-6214-0db2a5d131ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013518} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.928527] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.928527] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a1d35009-ea11-4e64-bbe4-604ed39d08f4/a1d35009-ea11-4e64-bbe4-604ed39d08f4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1506.929076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.929076] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.929205] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bc47e3c-4637-44f0-9f8b-63ab0ad32020 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.931508] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72c33015-2456-47bc-9458-82f4f807070d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.945301] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.945301] env[62405]: value = "task-1946796" [ 1506.945301] env[62405]: _type = "Task" [ 1506.945301] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.955722] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.955722] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.958475] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9cad2fb-ec23-4c14-b0a3-9cb3a79d49d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.967602] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1506.967602] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d69d59-f35b-716f-322d-3c63f499c0f4" [ 1506.967602] env[62405]: _type = "Task" [ 1506.967602] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.973166] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946796, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.986244] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d69d59-f35b-716f-322d-3c63f499c0f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.099064] env[62405]: DEBUG nova.network.neutron [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Updating instance_info_cache with network_info: [{"id": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "address": "fa:16:3e:0a:f4:a9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap531c83a1-6a", "ovs_interfaceid": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.213088] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Updated VIF entry in instance network info cache for port 
77f01bbe-48b5-4ad3-b215-90ff9d429d0b. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1507.213620] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Updating instance_info_cache with network_info: [{"id": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "address": "fa:16:3e:07:19:b6", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f01bbe-48", "ovs_interfaceid": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.339168] env[62405]: DEBUG nova.scheduler.client.report [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1507.380836] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.236879} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.385405] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1507.386371] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617754} completed successfully. 
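The inventory record above for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 carries totals, reserved amounts and allocation ratios per resource class; the capacity Placement actually schedules against is roughly (total - reserved) * allocation_ratio. A quick check of those numbers in plain Python, values copied from the record:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0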
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.387603] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89571338-0150-4b64-a15f-87834442a78b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.392806] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8995f9cb-8454-4a98-9090-290f87f8af18/8995f9cb-8454-4a98-9090-290f87f8af18.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1507.392806] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1507.392806] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5819ddc0-e99b-4c34-aef3-90d15f90d6cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.425693] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 02abae6c-8962-49eb-8fa9-36b13a20eff1/02abae6c-8962-49eb-8fa9-36b13a20eff1.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1507.435033] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b247d807-040b-4224-8dc0-5ff610ec9872 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.451574] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1507.451574] env[62405]: value = "task-1946797" [ 1507.451574] env[62405]: _type = "Task" [ 1507.451574] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.475827] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946796, 'name': CopyVirtualDisk_Task} progress is 77%. 
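"Extending root virtual disk to 1048576" is the requested size in kilobytes: these tempest instances use a 1 GiB root disk (the m1.nano flavor later in this log has root_gb=1), and the vmwareapi driver appears to hand ExtendVirtualDisk_Task a KB value. A one-line sanity check under that assumption:

    root_gb = 1                       # flavor root disk size in GiB (m1.nano)
    size_kb = root_gb * 1024 * 1024   # GiB -> KiB
    assert size_kb == 1048576         # matches "Extending root virtual disk to 1048576"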
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.476390] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1507.476390] env[62405]: value = "task-1946799" [ 1507.476390] env[62405]: _type = "Task" [ 1507.476390] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.476584] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946797, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.477761] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.496090] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946799, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.496406] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d69d59-f35b-716f-322d-3c63f499c0f4, 'name': SearchDatastore_Task, 'duration_secs': 0.065854} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.497230] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3096541-1c28-4bf1-b6af-34a60bb8b47d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.506600] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1507.506600] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df8d2a-106b-976c-d936-54e533411149" [ 1507.506600] env[62405]: _type = "Task" [ 1507.506600] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.516188] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df8d2a-106b-976c-d936-54e533411149, 'name': SearchDatastore_Task} progress is 0%. 
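Every "Waiting for the task: (returnval){ value = ... _type = Task }" record is followed by periodic progress polls until the task reports success or failure. A much-simplified version of that poll loop, not the oslo.vmware implementation; get_task_info is a hypothetical callable returning a (state, progress) pair:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll a vSphere-style task until it finishes.
        while True:
            state, progress = get_task_info()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            # e.g. "Task: {'id': task-1946796, ...} progress is 77%."
            print('progress is %s%%' % progress)
            time.sleep(poll_interval)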
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.602289] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.602650] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Instance network_info: |[{"id": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "address": "fa:16:3e:0a:f4:a9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap531c83a1-6a", "ovs_interfaceid": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1507.603267] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:f4:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '531c83a1-6a38-4d64-8757-3ffee5c271ee', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1507.611753] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating folder: Project (d5178d8cc12f46f3a8599384d4be9b6b). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1507.612127] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61c3825f-aacb-4759-a9e3-da507d697de4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.627133] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Created folder: Project (d5178d8cc12f46f3a8599384d4be9b6b) in parent group-v401284. [ 1507.627133] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating folder: Instances. Parent ref: group-v401330. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1507.627332] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e255772-fc67-4e4f-8a9a-3a0190a4cc55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.641966] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Created folder: Instances in parent group-v401330. [ 1507.642291] env[62405]: DEBUG oslo.service.loopingcall [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.642540] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1507.642873] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4cb9fd71-26f9-4eef-847a-6d9ace6ec2bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.665897] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1507.665897] env[62405]: value = "task-1946802" [ 1507.665897] env[62405]: _type = "Task" [ 1507.665897] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.680831] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946802, 'name': CreateVM_Task} progress is 0%. 
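The folder records above show the two-level layout used for instance VMs: a per-project folder named "Project (<project id>)" created under the parent group, with an "Instances" child folder inside it; creation tolerates the folder already existing so concurrent builds in the same project do not fail. A small sketch of that ensure-folder pattern; create_folder and DuplicateName are hypothetical stand-ins for the Folder.CreateFolder call and the vCenter duplicate-name fault:

    class DuplicateName(Exception):
        """Stand-in for the vCenter DuplicateName fault."""

    def ensure_folder(create_folder, parent_ref, name):
        # create_folder(parent_ref, name) is a hypothetical wrapper around
        # Folder.CreateFolder that raises DuplicateName when the folder exists.
        try:
            return create_folder(parent_ref, name)
        except DuplicateName:
            return None   # folder already present; the caller reuses its ref

    # Mirrors the layout logged above:
    #   group-v401284 -> "Project (d5178d8cc12f46f3a8599384d4be9b6b)" -> "Instances"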
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.717987] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Releasing lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.718212] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Received event network-vif-plugged-4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1507.718326] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquiring lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1507.718535] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.718751] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.718829] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] No waiting events found dispatching network-vif-plugged-4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1507.719053] env[62405]: WARNING nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Received unexpected event network-vif-plugged-4582fcd2-4721-4ad7-9452-5b808488dcb2 for instance with vm_state building and task_state spawning. [ 1507.720284] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Received event network-changed-4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1507.720284] env[62405]: DEBUG nova.compute.manager [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Refreshing instance network info cache due to event network-changed-4582fcd2-4721-4ad7-9452-5b808488dcb2. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1507.720284] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquiring lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1507.720284] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Acquired lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.720284] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Refreshing network info cache for port 4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1507.790125] env[62405]: DEBUG nova.compute.manager [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1507.847388] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.847927] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Start building networks asynchronously for instance. 
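The instance_info_cache blobs above are lists of VIF dictionaries: port id, MAC address, a network with subnets and fixed IPs, and the OVS binding details. Extracting the useful bits from one entry is plain dictionary work; a small helper using the structure exactly as it is logged:

    def summarize_vif(vif):
        # Return (port_id, mac, fixed_ips, devname) from one network_info entry.
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']
                     if ip['type'] == 'fixed']
        return vif['id'], vif['address'], fixed_ips, vif.get('devname')

    vif = {"id": "531c83a1-6a38-4d64-8757-3ffee5c271ee",
           "address": "fa:16:3e:0a:f4:a9",
           "devname": "tap531c83a1-6a",
           "network": {"subnets": [{"ips": [{"address": "192.168.233.182",
                                             "type": "fixed"}]}]}}
    print(summarize_vif(vif))
    # ('531c83a1-6a38-4d64-8757-3ffee5c271ee', 'fa:16:3e:0a:f4:a9',
    #  ['192.168.233.182'], 'tap531c83a1-6a')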
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1507.851232] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.716s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1507.853315] env[62405]: INFO nova.compute.claims [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1507.929102] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Successfully updated port: ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1507.962928] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updated VIF entry in instance network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1507.963411] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updating instance_info_cache with network_info: [{"id": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "address": "fa:16:3e:62:6a:7a", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19538d37-e3", "ovs_interfaceid": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.972047] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100354} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.974617] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1507.975159] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946796, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564107} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.976377] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdae10ed-e05e-4729-a01c-87f24ca2b881 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.978952] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a1d35009-ea11-4e64-bbe4-604ed39d08f4/a1d35009-ea11-4e64-bbe4-604ed39d08f4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1507.979266] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1507.981840] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9769d7d8-0512-4736-8030-03a5a4730218 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.006946] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 8995f9cb-8454-4a98-9090-290f87f8af18/8995f9cb-8454-4a98-9090-290f87f8af18.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1508.009169] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.010828] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae0e81bc-d818-4e63-b934-457b98edac32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.027406] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 
tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1508.027406] env[62405]: value = "task-1946803" [ 1508.027406] env[62405]: _type = "Task" [ 1508.027406] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.035906] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946799, 'name': ReconfigVM_Task, 'duration_secs': 0.334187} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.040559] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 02abae6c-8962-49eb-8fa9-36b13a20eff1/02abae6c-8962-49eb-8fa9-36b13a20eff1.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1508.043863] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fde9ff25-42b7-422b-a5d9-3ab72369e2d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.053319] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1508.053319] env[62405]: value = "task-1946804" [ 1508.053319] env[62405]: _type = "Task" [ 1508.053319] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.053319] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df8d2a-106b-976c-d936-54e533411149, 'name': SearchDatastore_Task, 'duration_secs': 0.013295} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.053319] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.053319] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca0aca02-4b99-4393-900c-b9cb0dad55c7/ca0aca02-4b99-4393-900c-b9cb0dad55c7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1508.059273] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16c3e582-c09e-4df0-96b4-2d3bf181723b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.062038] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946803, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.064525] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1508.064525] env[62405]: value = "task-1946805" [ 1508.064525] env[62405]: _type = "Task" [ 1508.064525] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.075226] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946804, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.080132] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1508.080132] env[62405]: value = "task-1946806" [ 1508.080132] env[62405]: _type = "Task" [ 1508.080132] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.080918] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946805, 'name': Rename_Task} progress is 6%. 
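Disk sources and targets in these records are datastore paths of the form "[datastore] folder/file.vmdk": the cached image lives under devstack-image-cache_base/<image id>/ and each instance disk under <instance uuid>/. Composing and splitting such paths is plain string handling; a tiny helper consistent with the paths above:

    def build_ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    def split_ds_path(path):
        ds, rel = path.split('] ', 1)
        return ds.lstrip('['), rel

    image = 'e6bba7a8-c2de-41dc-871a-3859bba5f4f9'
    cached = build_ds_path('datastore1', 'devstack-image-cache_base',
                           image, image + '.vmdk')
    print(cached)                 # '[datastore1] devstack-image-cache_base/<image>/<image>.vmdk'
    print(split_ds_path(cached))  # ('datastore1', 'devstack-image-cache_base/<image>/<image>.vmdk')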
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.091298] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.178444] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946802, 'name': CreateVM_Task, 'duration_secs': 0.457684} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.178587] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1508.179324] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.179795] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.179882] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1508.180088] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6136cd62-a98d-4758-973c-4273f11d36ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.187045] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1508.187045] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aeb078-f767-d2f2-18e1-d793652a403c" [ 1508.187045] env[62405]: _type = "Task" [ 1508.187045] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.201408] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aeb078-f767-d2f2-18e1-d793652a403c, 'name': SearchDatastore_Task} progress is 0%. 
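After CreateVM_Task finishes, the driver takes the per-image lock again and runs HostDatastoreBrowser.SearchDatastore_Task to check whether the cached vmdk is already on the datastore; only on a miss would the base image be fetched. Roughly, as a hedged sketch (file_exists and fetch_image are hypothetical stand-ins for the datastore search and the image download):

    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_vmdk, file_exists, fetch_image):
        # file_exists(path) stands in for the SearchDatastore_Task lookup,
        # fetch_image(path) for the download/convert of the base image.
        with lockutils.lock(cache_vmdk):
            if not file_exists(cache_vmdk):
                fetch_image(cache_vmdk)
        return cache_vmdk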
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.310659] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.359792] env[62405]: DEBUG nova.compute.utils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1508.367031] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1508.367031] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1508.423411] env[62405]: DEBUG nova.policy [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '951af0f23aed4a9987d76e6148e2bee6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe2966f5756b424fbfbca6677e4d948a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1508.432047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.432232] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquired lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.432379] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.467600] env[62405]: DEBUG oslo_concurrency.lockutils 
[req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Releasing lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.468034] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Received event network-vif-plugged-d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1508.468145] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquiring lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.468304] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.468465] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.468629] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] No waiting events found dispatching network-vif-plugged-d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1508.468808] env[62405]: WARNING nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Received unexpected event network-vif-plugged-d385dca6-fc58-4113-bd50-3886fbe12d53 for instance with vm_state building and task_state spawning. 
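The network-vif-plugged handling above follows the external-event pattern: a waiter is registered for an expected event, Neutron posts the event through the API, and the waiter is woken; when no waiter is registered (here the instance is still building), the event is logged as unexpected and dropped. A much-simplified registry illustrating the idea with threading primitives, not the nova.compute.manager implementation:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_key) -> threading.Event

        def prepare(self, instance_uuid, event_key):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_key)] = ev
            return ev            # the build later calls ev.wait(timeout=...)

        def pop_instance_event(self, instance_uuid, event_key):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_key), None)
            if ev is None:
                return False     # "No waiting events found dispatching ..."
            ev.set()
            return True

    events = InstanceEvents()
    events.pop_instance_event('8995f9cb-8454-4a98-9090-290f87f8af18',
                              'network-vif-plugged-4582fcd2-4721-4ad7-9452-5b808488dcb2')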
[ 1508.468974] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Received event network-vif-deleted-15d03dd1-4edd-413d-a67d-3c877a40692a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1508.469217] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Received event network-changed-d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1508.469396] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Refreshing instance network info cache due to event network-changed-d385dca6-fc58-4113-bd50-3886fbe12d53. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1508.469589] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquiring lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.469725] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquired lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.469885] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Refreshing network info cache for port d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1508.535418] env[62405]: INFO nova.compute.manager [-] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Took 1.70 seconds to deallocate network for instance. [ 1508.569023] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073808} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.573907] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1508.579694] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c888c555-feb5-4ebd-8d93-913b499c3131 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.583529] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946804, 'name': ReconfigVM_Task, 'duration_secs': 0.340847} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.584855] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 8995f9cb-8454-4a98-9090-290f87f8af18/8995f9cb-8454-4a98-9090-290f87f8af18.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1508.590836] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e3f09a0-887a-4371-b8a6-68ad92c89c9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.597140] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946805, 'name': Rename_Task, 'duration_secs': 0.17484} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.612211] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.628490] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] a1d35009-ea11-4e64-bbe4-604ed39d08f4/a1d35009-ea11-4e64-bbe4-604ed39d08f4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1508.634965] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47ab14bb-8c7b-4740-90e4-9f9770bbbda4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.637419] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a5f1d04-1d89-4a2d-8e70-c6ef0b045ef1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.655057] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1508.655057] env[62405]: value = "task-1946807" [ 1508.655057] env[62405]: _type = "Task" [ 1508.655057] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.658872] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946806, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.668911] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1508.668911] env[62405]: value = "task-1946809" [ 1508.668911] env[62405]: _type = "Task" [ 1508.668911] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.670669] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1508.670669] env[62405]: value = "task-1946808" [ 1508.670669] env[62405]: _type = "Task" [ 1508.670669] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.682443] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946807, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.695541] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946809, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.695541] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946808, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.707938] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aeb078-f767-d2f2-18e1-d793652a403c, 'name': SearchDatastore_Task, 'duration_secs': 0.025728} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.708460] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1508.708796] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1508.709113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.709385] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.710830] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 
tempest-ListImageFiltersTestJSON-38693891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1508.710830] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74c5122d-da6c-4cac-9c77-4ca98c6462e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.729629] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1508.729629] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1508.730736] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b00b6a6e-f346-4a77-b033-00801289dc8b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.740511] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1508.740511] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a82aee-42a3-00df-8668-c098ecfdc99b" [ 1508.740511] env[62405]: _type = "Task" [ 1508.740511] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.749103] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Updated VIF entry in instance network info cache for port 4582fcd2-4721-4ad7-9452-5b808488dcb2. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1508.749748] env[62405]: DEBUG nova.network.neutron [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Updating instance_info_cache with network_info: [{"id": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "address": "fa:16:3e:f8:6b:65", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4582fcd2-47", "ovs_interfaceid": "4582fcd2-4721-4ad7-9452-5b808488dcb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.755667] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a82aee-42a3-00df-8668-c098ecfdc99b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.873021] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1508.966050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.966744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.058404] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1509.058958] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1509.059231] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1509.059405] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1509.059586] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1509.059741] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1509.059901] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1509.060253] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1509.060463] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1509.060907] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1509.061131] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1509.061326] env[62405]: DEBUG nova.virt.hardware [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1509.062402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.063615] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Successfully created port: c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.069802] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5584bdf4-0e61-40b5-a3d8-e98388277701 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.082499] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacf13c2-bc1e-432b-8b43-64c35e1c2fe5 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.107534] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812832} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.111096] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca0aca02-4b99-4393-900c-b9cb0dad55c7/ca0aca02-4b99-4393-900c-b9cb0dad55c7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1509.111759] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1509.111867] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88accc0e-087a-45cb-851f-06d9c73ee850 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.122073] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1509.122073] env[62405]: value = "task-1946810" [ 1509.122073] env[62405]: _type = "Task" [ 1509.122073] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.133862] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.168686] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946807, 'name': Rename_Task, 'duration_secs': 0.258609} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.168686] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1509.168686] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09a3eaae-d08a-4549-9aaf-e82f8807e16b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.190230] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946809, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.190486] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946808, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.190820] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1509.190820] env[62405]: value = "task-1946811" [ 1509.190820] env[62405]: _type = "Task" [ 1509.190820] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.201059] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946811, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.231375] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1509.254733] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a82aee-42a3-00df-8668-c098ecfdc99b, 'name': SearchDatastore_Task, 'duration_secs': 0.059558} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.260214] env[62405]: DEBUG oslo_concurrency.lockutils [req-54096c80-d521-4e1b-9b06-9b9c03a0d35c req-ce21e46d-4f6a-4eb2-9e7b-e000b187f231 service nova] Releasing lock "refresh_cache-8995f9cb-8454-4a98-9090-290f87f8af18" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.264501] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b20943ca-e7d3-4beb-948e-cf40fe8a1605 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.281083] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1509.281083] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52699c1d-8767-081f-1a06-fd88e75f038a" [ 1509.281083] env[62405]: _type = "Task" [ 1509.281083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.295221] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52699c1d-8767-081f-1a06-fd88e75f038a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.485773] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5d5b9d-7086-44dd-b2a7-8e9b910d66e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.500830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb9efe0-6b9a-434a-a7b7-03a220831f8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.538187] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9b1559-f903-466e-a340-1c37d9efd9d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.550104] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1bf76c-e521-4718-a8a7-fbd2d6bcd181 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.573490] env[62405]: DEBUG nova.compute.provider_tree [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1509.640477] env[62405]: DEBUG 
oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145822} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.640937] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1509.642316] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df35b54-67cd-4ada-81f6-796b882c3829 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.673768] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] ca0aca02-4b99-4393-900c-b9cb0dad55c7/ca0aca02-4b99-4393-900c-b9cb0dad55c7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1509.674680] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-758c6d9b-3a12-4c68-88e9-462a00e79d44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.701643] env[62405]: DEBUG nova.network.neutron [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating instance_info_cache with network_info: [{"id": "ec10e8ab-572c-4bfa-810d-befff7776996", "address": "fa:16:3e:6e:8c:bf", "network": {"id": "6406e850-662e-40f1-8855-1b2f61663441", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1543725241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f4c6a0483674c7286fb3edcb24f70d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec10e8ab-57", "ovs_interfaceid": "ec10e8ab-572c-4bfa-810d-befff7776996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.715641] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 
tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1509.715641] env[62405]: value = "task-1946813" [ 1509.715641] env[62405]: _type = "Task" [ 1509.715641] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.728962] env[62405]: DEBUG oslo_vmware.api [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946808, 'name': PowerOnVM_Task, 'duration_secs': 0.910658} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.729179] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946811, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.729430] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946809, 'name': ReconfigVM_Task, 'duration_secs': 0.569152} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.733806] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1509.734112] env[62405]: INFO nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Took 15.34 seconds to spawn the instance on the hypervisor. 
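The interleaved task records here follow the oslo_vmware wait_for_task pattern visible throughout this log: the caller logs "Waiting for the task: (returnval){...}", _poll_task reports a progress percentage on each poll, and the final poll logs the task as completed together with its duration_secs (task-1946810, the ExtendVirtualDisk_Task above, goes from 0% to completed in 0.145822s). The sketch below is a minimal illustration of that poll loop, not oslo.vmware's implementation; get_task_info, TaskInfo and its fields are hypothetical stand-ins for the vSphere TaskInfo object the driver actually polls.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vSphere TaskInfo object.
    state: str                   # 'queued', 'running', 'success' or 'error'
    progress: int = 0            # percent complete, as in the "_poll_task ... progress is N%" lines
    error: Optional[str] = None  # fault message when state == 'error'

def wait_for_task(get_task_info: Callable[[str], TaskInfo],
                  task_ref: str,
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a task until it reaches a terminal state, echoing the log's poll pattern."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            duration = time.monotonic() - start
            print(f"Task {task_ref} completed successfully. duration_secs={duration:.6f}")
            return info
        if info.state == 'error':
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")
        time.sleep(poll_interval)

A caller would pass a session-bound get_task_info callable plus the task reference returned by the API (for example the "task-1946810" value above); the loop then produces the same progress/completed sequence seen in the surrounding records.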
[ 1509.734300] env[62405]: DEBUG nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1509.734615] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Reconfigured VM instance instance-0000000d to attach disk [datastore1] a1d35009-ea11-4e64-bbe4-604ed39d08f4/a1d35009-ea11-4e64-bbe4-604ed39d08f4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1509.736100] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06895cbf-e415-4989-9d14-1bfe98e9e428 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.739668] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ad96ff9-2b65-43ca-b371-dbc380bfafb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.748518] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946813, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.754904] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1509.754904] env[62405]: value = "task-1946814" [ 1509.754904] env[62405]: _type = "Task" [ 1509.754904] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.766885] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946814, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.793657] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52699c1d-8767-081f-1a06-fd88e75f038a, 'name': SearchDatastore_Task, 'duration_secs': 0.045996} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.793955] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.794239] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b8ff115b-64f1-4584-afa2-478c5e6b726b/b8ff115b-64f1-4584-afa2-478c5e6b726b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1509.794491] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26f6fddd-5516-4bfb-9214-f64d6060d734 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.802738] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1509.802738] env[62405]: value = "task-1946815" [ 1509.802738] env[62405]: _type = "Task" [ 1509.802738] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.811917] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Updated VIF entry in instance network info cache for port d385dca6-fc58-4113-bd50-3886fbe12d53. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1509.812258] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Updating instance_info_cache with network_info: [{"id": "d385dca6-fc58-4113-bd50-3886fbe12d53", "address": "fa:16:3e:3b:50:9d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd385dca6-fc", "ovs_interfaceid": "d385dca6-fc58-4113-bd50-3886fbe12d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.819514] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946815, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.890490] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1509.932056] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1509.932335] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1509.932549] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1509.932725] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1509.933098] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1509.933327] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1509.933620] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1509.933846] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1509.934140] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1509.934397] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1509.934850] env[62405]: DEBUG nova.virt.hardware [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1509.935660] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67e628a-4411-463d-8293-c8f53334bb78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.946280] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a98c56-9b64-4ad2-9ad2-23684d89f870 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.972282] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Received event network-vif-plugged-2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1509.972498] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquiring lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.972713] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.972918] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.973125] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] No waiting events found dispatching network-vif-plugged-2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1509.973420] env[62405]: WARNING nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Received unexpected event network-vif-plugged-2df3353e-cc22-401d-ba57-099a6e08d7e7 for instance with vm_state building and task_state spawning. [ 1509.973577] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Received event network-changed-2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1509.973731] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Refreshing instance network info cache due to event network-changed-2df3353e-cc22-401d-ba57-099a6e08d7e7. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1509.973935] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquiring lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.974093] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquired lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.974225] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Refreshing network info cache for port 2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.109182] env[62405]: ERROR nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [req-8fbedac7-7099-4d39-907f-aa3a2bf6c6d1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8fbedac7-7099-4d39-907f-aa3a2bf6c6d1"}]} [ 1510.133400] env[62405]: DEBUG nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1510.153454] env[62405]: DEBUG nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1510.153726] env[62405]: DEBUG nova.compute.provider_tree [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1510.171056] env[62405]: DEBUG nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1510.203694] env[62405]: DEBUG nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1510.216313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "801e7086-5742-4a04-962c-7546284aa12d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.219026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.219026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "801e7086-5742-4a04-962c-7546284aa12d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.219026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.219026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.219680] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Releasing lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.220065] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Instance network_info: |[{"id": "ec10e8ab-572c-4bfa-810d-befff7776996", "address": "fa:16:3e:6e:8c:bf", "network": {"id": "6406e850-662e-40f1-8855-1b2f61663441", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1543725241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f4c6a0483674c7286fb3edcb24f70d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", 
"segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec10e8ab-57", "ovs_interfaceid": "ec10e8ab-572c-4bfa-810d-befff7776996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1510.220414] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946811, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.220902] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:8c:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec10e8ab-572c-4bfa-810d-befff7776996', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1510.231614] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Creating folder: Project (9f4c6a0483674c7286fb3edcb24f70d8). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1510.232997] env[62405]: INFO nova.compute.manager [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Terminating instance [ 1510.234386] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e47fa7ec-cf95-4f70-9ce5-33c59fe07b45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.256359] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946813, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.264703] env[62405]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1510.265035] env[62405]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62405) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1510.276837] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Folder already exists: Project (9f4c6a0483674c7286fb3edcb24f70d8). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1510.277110] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Creating folder: Instances. Parent ref: group-v401285. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1510.282762] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69422fda-0375-41a5-b558-c0425bdb2ccd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.285454] env[62405]: INFO nova.compute.manager [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Took 21.22 seconds to build instance. [ 1510.295406] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946814, 'name': Rename_Task, 'duration_secs': 0.252954} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.296160] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1510.296652] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20115a30-2df5-4a1a-8a4f-5c5297f34b41 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.301142] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Created folder: Instances in parent group-v401285. [ 1510.302507] env[62405]: DEBUG oslo.service.loopingcall [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.304803] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1510.305264] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d4fc451-fb5e-460a-9b06-47322bb905d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.328663] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Releasing lock "refresh_cache-a1d35009-ea11-4e64-bbe4-604ed39d08f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.330804] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1510.330804] env[62405]: DEBUG nova.compute.manager [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing instance network info cache due to event network-changed-19538d37-e369-4f7b-8051-61d2c0a7fb00. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1510.330804] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquiring lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.330804] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Acquired lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.330804] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Refreshing network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.331316] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1510.331316] env[62405]: value = "task-1946818" [ 1510.331316] env[62405]: _type = "Task" [ 1510.331316] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.340973] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946815, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.343035] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1510.343035] env[62405]: value = "task-1946819" [ 1510.343035] env[62405]: _type = "Task" [ 1510.343035] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.351139] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946818, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.361746] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946819, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.385154] env[62405]: DEBUG nova.compute.manager [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Received event network-changed-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1510.385432] env[62405]: DEBUG nova.compute.manager [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Refreshing instance network info cache due to event network-changed-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1510.385771] env[62405]: DEBUG oslo_concurrency.lockutils [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] Acquiring lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.385872] env[62405]: DEBUG oslo_concurrency.lockutils [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] Acquired lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.386138] env[62405]: DEBUG nova.network.neutron [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Refreshing network info cache for port 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.714812] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946811, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.745473] env[62405]: DEBUG nova.compute.manager [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1510.745847] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1510.746075] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946813, 'name': ReconfigVM_Task, 'duration_secs': 0.546181} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.747308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab468daf-a8f7-4b8d-ad18-01d4fd730240 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.750404] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Reconfigured VM instance instance-0000000e to attach disk [datastore1] ca0aca02-4b99-4393-900c-b9cb0dad55c7/ca0aca02-4b99-4393-900c-b9cb0dad55c7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1510.753678] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58e3a2dc-b077-470a-ac01-34835ee2a67d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.762329] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1510.763775] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-055c481e-3603-44db-9600-237df3090712 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.765609] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1510.765609] env[62405]: value = "task-1946820" [ 1510.765609] env[62405]: _type = "Task" [ 1510.765609] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.776665] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1510.776665] env[62405]: value = "task-1946821" [ 1510.776665] env[62405]: _type = "Task" [ 1510.776665] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.784744] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946820, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.787319] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946821, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.797084] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59074c07-ded2-4786-b30f-310dd9a0aa79 tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.741s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.800395] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78931499-dc7d-4d45-9f51-e762d32fa015 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.813206] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e7f476-f0be-421d-9a89-11c978a0a387 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.820974] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946815, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585689} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.821714] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b8ff115b-64f1-4584-afa2-478c5e6b726b/b8ff115b-64f1-4584-afa2-478c5e6b726b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1510.821964] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1510.822542] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bc6a237-6a9d-48a7-8544-12ec64509d72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.869301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6247fd8a-30ee-4bba-9052-7c4f7448f220 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.877310] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1510.877310] env[62405]: value = "task-1946822" [ 1510.877310] env[62405]: _type = "Task" [ 1510.877310] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.893365] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946819, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.895908] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946818, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.897858] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1548e8-d344-43b4-b1ec-55c055ed3e5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.906512] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946822, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.919256] env[62405]: DEBUG nova.compute.provider_tree [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1511.052973] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Updated VIF entry in instance network info cache for port 2df3353e-cc22-401d-ba57-099a6e08d7e7. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.053650] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Updating instance_info_cache with network_info: [{"id": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "address": "fa:16:3e:8e:9d:0d", "network": {"id": "1428cc25-4f53-4239-85db-9f8c1df3b565", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-160875770-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e05b3582b75842c5908781d74ee041aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2df3353e-cc", "ovs_interfaceid": "2df3353e-cc22-401d-ba57-099a6e08d7e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.210417] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946811, 'name': PowerOnVM_Task, 'duration_secs': 1.604772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.210417] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.210510] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Took 14.38 seconds to spawn the instance on the hypervisor. [ 1511.210605] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1511.212900] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96280d1d-74ae-423b-a186-2f71f3d99c79 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.280081] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946820, 'name': Rename_Task, 'duration_secs': 0.292539} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.283480] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1511.283884] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87057739-602a-4fe3-b3a6-acdef92ade9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.294568] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946821, 'name': PowerOffVM_Task, 'duration_secs': 0.292437} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.296446] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1511.296737] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1511.297430] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1511.297430] env[62405]: value = "task-1946823" [ 1511.297430] env[62405]: _type = "Task" [ 1511.297430] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.297801] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84165dad-aba3-4ad3-ad48-eca244fbce50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.306718] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Successfully updated port: c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1511.309607] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1511.331430] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946823, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.373833] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946818, 'name': PowerOnVM_Task, 'duration_secs': 0.812734} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.374242] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946819, 'name': CreateVM_Task, 'duration_secs': 0.679191} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.374601] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1511.374923] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Took 12.11 seconds to spawn the instance on the hypervisor. [ 1511.375243] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1511.375572] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1511.376532] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2af840b-23b9-44b5-9c5b-5bb3d123f7e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.379530] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'delete_on_termination': True, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'attachment_id': 'e85ff6fb-ee85-441e-8290-104159212db4', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401291', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'name': 'volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3f9849b8-6aaa-4d32-b140-207d5b54d68f', 'attached_at': '', 'detached_at': '', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'serial': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35'}, 'volume_type': None}], 'swap': None} {{(pid=62405) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1511.379965] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Root volume attach. 
Driver type: vmdk {{(pid=62405) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1511.381182] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4106b9d7-2c77-4c6c-a639-b1c8bbb0d663 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.398537] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150037} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.401789] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1511.402747] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877b6c99-90bb-47f9-981a-fe819472b778 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.406142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b05834-a2ba-492d-9eb0-78d22468c714 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.422620] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdacaf82-599f-4929-a86b-bb511623f39a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.437834] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] b8ff115b-64f1-4584-afa2-478c5e6b726b/b8ff115b-64f1-4584-afa2-478c5e6b726b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1511.439636] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3b9b377-6125-4a09-a3e1-4956215468d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.461646] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-252b5206-605d-4cee-94e1-a90433eaae05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.464427] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1511.464427] env[62405]: value = "task-1946825" [ 1511.464427] env[62405]: _type = "Task" [ 1511.464427] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.473376] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1511.473376] env[62405]: value = "task-1946826" [ 1511.473376] env[62405]: _type = "Task" [ 1511.473376] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.477317] env[62405]: DEBUG nova.scheduler.client.report [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 36 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1511.477558] env[62405]: DEBUG nova.compute.provider_tree [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 36 to 37 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1511.477735] env[62405]: DEBUG nova.compute.provider_tree [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1511.485066] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946825, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.491565] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946826, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.556882] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Releasing lock "refresh_cache-ca0aca02-4b99-4393-900c-b9cb0dad55c7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.557175] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Received event network-vif-plugged-531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1511.557435] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquiring lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.557927] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.558271] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.558526] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] No waiting events found dispatching network-vif-plugged-531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1511.558869] env[62405]: WARNING nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Received unexpected event network-vif-plugged-531c83a1-6a38-4d64-8757-3ffee5c271ee for instance with vm_state building and task_state spawning. [ 1511.559157] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Received event network-changed-531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1511.559444] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Refreshing instance network info cache due to event network-changed-531c83a1-6a38-4d64-8757-3ffee5c271ee. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1511.559735] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquiring lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.559859] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquired lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.560021] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Refreshing network info cache for port 531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.573681] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1511.573983] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1511.574205] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Deleting the datastore file [datastore1] 801e7086-5742-4a04-962c-7546284aa12d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1511.575158] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d42f03c-b79b-4dbe-84d6-c758f536bb1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.586589] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for the task: (returnval){ [ 1511.586589] env[62405]: value = "task-1946827" [ 1511.586589] env[62405]: _type = "Task" [ 1511.586589] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.598283] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946827, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.617608] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updated VIF entry in instance network info cache for port 19538d37-e369-4f7b-8051-61d2c0a7fb00. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.618033] env[62405]: DEBUG nova.network.neutron [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updating instance_info_cache with network_info: [{"id": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "address": "fa:16:3e:62:6a:7a", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19538d37-e3", "ovs_interfaceid": "19538d37-e369-4f7b-8051-61d2c0a7fb00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.650714] env[62405]: DEBUG nova.network.neutron [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updated VIF entry in instance network info cache for port 0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.650924] env[62405]: DEBUG nova.network.neutron [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updating instance_info_cache with network_info: [{"id": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "address": "fa:16:3e:f7:0a:02", "network": {"id": "672e2f4d-571c-431f-bc4f-101f0e233d70", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-758460415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37e71f387ca845b99564479baf7a9012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0805ecfc-d6", "ovs_interfaceid": "0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.738472] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Took 22.35 seconds to build instance. [ 1511.821395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.821677] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquired lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.821878] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1511.823481] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946823, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.850889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.915650] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Took 22.48 seconds to build instance. [ 1511.980445] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946825, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.982487] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.131s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.983137] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1511.991768] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.501s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.007263] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946826, 'name': RelocateVM_Task, 'duration_secs': 0.035142} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.008347] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1512.009365] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401291', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'name': 'volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3f9849b8-6aaa-4d32-b140-207d5b54d68f', 'attached_at': '', 'detached_at': '', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'serial': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1512.010345] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1030432-7b27-4bbc-bd5f-7c001937a45d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.031400] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321e573-2696-48e7-a706-7f7c11afb900 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.058914] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35/volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1512.060864] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-499cf054-bd16-45cd-87be-a06b59cba2ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.085968] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1512.085968] env[62405]: value = "task-1946829" [ 1512.085968] env[62405]: _type = "Task" [ 1512.085968] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.097844] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946829, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.101450] env[62405]: DEBUG oslo_vmware.api [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Task: {'id': task-1946827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266133} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.101724] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1512.101953] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1512.102183] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1512.102374] env[62405]: INFO nova.compute.manager [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1512.102620] env[62405]: DEBUG oslo.service.loopingcall [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1512.102804] env[62405]: DEBUG nova.compute.manager [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1512.102954] env[62405]: DEBUG nova.network.neutron [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1512.122958] env[62405]: DEBUG oslo_concurrency.lockutils [req-bbfe44ec-1116-4359-bf77-52e562b3df89 req-22024b9a-8b6a-42ba-8078-f4ce5e2d520d service nova] Releasing lock "refresh_cache-0491dc4b-cf35-4035-aca9-baf43b86af7e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.154396] env[62405]: DEBUG oslo_concurrency.lockutils [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] Releasing lock "refresh_cache-9b71f962-2b92-4f7b-bb8d-b50da5130018" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.154673] env[62405]: DEBUG nova.compute.manager [req-24ab477c-2446-4f6f-ac4a-1d1670dd2d5d req-3a7309be-0fe2-4b4d-947a-60c53e8711a6 service nova] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Received event network-vif-deleted-ff5be597-1e44-4215-81eb-9129935b393c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1512.240837] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.860s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.315346] env[62405]: DEBUG oslo_vmware.api [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946823, 'name': PowerOnVM_Task, 'duration_secs': 0.841596} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.315672] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1512.315978] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Took 10.38 seconds to spawn the instance on the hypervisor. 
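The entries above repeat one pattern over and over: a vCenter method such as PowerOnVM_Task, ReconfigVM_Task or CreateVM_Task is invoked, a task-19468xx identifier comes back, and oslo_vmware.api polls it ("_poll_task ... progress is N%") until wait_for_task reports completion. As a minimal, illustrative sketch only (not taken from this log), the Python below shows how that pattern is typically driven through oslo.vmware; the vCenter host, credentials and the "vm-401291" managed-object reference are placeholders, and the exact VMwareAPISession keyword set can vary between releases.

```python
# Hedged sketch, not part of the log: drive an asynchronous vCenter task and
# wait on it the way the wait_for_task/_poll_task entries above do.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',          # placeholder vCenter host
    'administrator@vsphere.local',  # placeholder user
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,         # frequent polls, like the "progress is N%" lines
)

# Managed-object reference for a VM; 'vm-401291' mirrors a value seen in the log
# but is only a placeholder here.
vm_ref = vim_util.get_moref('vm-401291', 'VirtualMachine')

# Start the asynchronous task, then block until the poll loop sees it succeed.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```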
[ 1512.316217] env[62405]: DEBUG nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1512.316880] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745659e8-ad15-45a9-b955-47acdb17cbf7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.363913] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.417726] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.998s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.481566] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946825, 'name': ReconfigVM_Task, 'duration_secs': 0.561167} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.481815] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Reconfigured VM instance instance-0000000f to attach disk [datastore1] b8ff115b-64f1-4584-afa2-478c5e6b726b/b8ff115b-64f1-4584-afa2-478c5e6b726b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1512.482593] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dac14b49-8f5e-4f54-9e80-baf142d01a2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.493656] env[62405]: DEBUG nova.compute.utils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.494578] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1512.494578] env[62405]: value = "task-1946830" [ 1512.494578] env[62405]: _type = "Task" [ 1512.494578] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.494840] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1512.495008] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1512.521518] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946830, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.525553] env[62405]: DEBUG nova.network.neutron [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updating instance_info_cache with network_info: [{"id": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "address": "fa:16:3e:97:6f:79", "network": {"id": "5289661e-534d-40b0-8a99-75fc7c9da4f3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1070103644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2966f5756b424fbfbca6677e4d948a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f4fd8a-cd", "ovs_interfaceid": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.598295] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946829, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.671927] env[62405]: DEBUG nova.policy [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14163d09c6f843f3be5e5359034713c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25a20f4605e14a52bdf53052483b7e1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1512.695243] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Updated VIF entry in instance network info cache for port 531c83a1-6a38-4d64-8757-3ffee5c271ee. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1512.695243] env[62405]: DEBUG nova.network.neutron [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Updating instance_info_cache with network_info: [{"id": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "address": "fa:16:3e:0a:f4:a9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap531c83a1-6a", "ovs_interfaceid": "531c83a1-6a38-4d64-8757-3ffee5c271ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.745132] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1512.808484] env[62405]: DEBUG nova.compute.manager [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Received event network-changed {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1512.808787] env[62405]: DEBUG nova.compute.manager [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Refreshing instance network info cache due to event network-changed. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1512.809521] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] Acquiring lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.809521] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] Acquired lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.809713] env[62405]: DEBUG nova.network.neutron [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.842480] env[62405]: INFO nova.compute.manager [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Took 20.84 seconds to build instance. [ 1512.923903] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1512.998022] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1513.011235] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating resource usage from migration 80a37923-23e1-4b60-aaf2-72933f6694b9 [ 1513.014147] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946830, 'name': Rename_Task, 'duration_secs': 0.263688} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.014797] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1513.015181] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-131f61a6-088e-4960-9ca9-7bbf176c6a9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.023654] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1513.023654] env[62405]: value = "task-1946831" [ 1513.023654] env[62405]: _type = "Task" [ 1513.023654] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.028124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Releasing lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.029973] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Instance network_info: |[{"id": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "address": "fa:16:3e:97:6f:79", "network": {"id": "5289661e-534d-40b0-8a99-75fc7c9da4f3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1070103644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2966f5756b424fbfbca6677e4d948a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f4fd8a-cd", "ovs_interfaceid": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1513.030326] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:6f:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1f4fd8a-cda2-4206-b706-58f6fa8c722e', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1513.038432] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Creating folder: Project (fe2966f5756b424fbfbca6677e4d948a). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1513.040146] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef268c40-3741-4cf7-a728-43a2d0268ddc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.048813] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946831, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.060139] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Created folder: Project (fe2966f5756b424fbfbca6677e4d948a) in parent group-v401284. [ 1513.062028] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Creating folder: Instances. Parent ref: group-v401336. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1513.062028] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad8dc118-5564-45af-927f-33bdc63069ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.073760] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Created folder: Instances in parent group-v401336. [ 1513.074286] env[62405]: DEBUG oslo.service.loopingcall [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1513.074673] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1513.075255] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf789108-a0e0-4037-9f10-b4f8de1bdc47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.102648] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946829, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.104444] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1513.104444] env[62405]: value = "task-1946834" [ 1513.104444] env[62405]: _type = "Task" [ 1513.104444] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.113273] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946834, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.197327] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Releasing lock "refresh_cache-b8ff115b-64f1-4584-afa2-478c5e6b726b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1513.197603] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Received event network-vif-plugged-ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1513.197803] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Acquiring lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.198021] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.198197] env[62405]: DEBUG oslo_concurrency.lockutils [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.198368] env[62405]: DEBUG nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] 
[instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] No waiting events found dispatching network-vif-plugged-ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1513.198563] env[62405]: WARNING nova.compute.manager [req-2aec3c88-aa30-41f2-8041-95e598e3287e req-db9be137-5af8-480d-9687-fc822ed655cf service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Received unexpected event network-vif-plugged-ec10e8ab-572c-4bfa-810d-befff7776996 for instance with vm_state building and task_state spawning. [ 1513.274442] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.338033] env[62405]: DEBUG nova.network.neutron [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.344251] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9cfcffa3-8e79-4b3c-892c-ff69e6f340d1 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.884s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.350529] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Successfully created port: ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1513.443947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.535582] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946831, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.552992] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 777ddb84-25b9-4da6-be6b-a2289dbf510a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.552992] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 6199de01-baca-4461-9572-111eda11adac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.552992] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.552992] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 2257c786-54f9-441a-832c-cf3178bfcc78 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1513.553214] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9b71f962-2b92-4f7b-bb8d-b50da5130018 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.553214] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8624629d-642a-4adf-984e-3925beeb4fef is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1513.553214] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3c0b964f-c900-4704-ae12-7eba7952f678 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1513.553214] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 7db1b086-942e-4890-8750-0d717e522786 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.606563] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946829, 'name': ReconfigVM_Task, 'duration_secs': 1.34661} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.611813] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35/volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1513.621239] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-434307da-361d-44f9-8759-144e453e939a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.645636] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946834, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.647137] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1513.647137] env[62405]: value = "task-1946835" [ 1513.647137] env[62405]: _type = "Task" [ 1513.647137] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.656820] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946835, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.841105] env[62405]: INFO nova.compute.manager [-] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Took 1.74 seconds to deallocate network for instance. [ 1513.846861] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1513.951035] env[62405]: DEBUG nova.network.neutron [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Updating instance_info_cache with network_info: [{"id": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "address": "fa:16:3e:07:19:b6", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77f01bbe-48", "ovs_interfaceid": "77f01bbe-48b5-4ad3-b215-90ff9d429d0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.968419] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Received event network-changed-ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1513.968646] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Refreshing instance network info cache due to event network-changed-ec10e8ab-572c-4bfa-810d-befff7776996. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1513.968871] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquiring lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1513.969085] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquired lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1513.969186] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Refreshing network info cache for port ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1514.004728] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1514.041888] env[62405]: DEBUG oslo_vmware.api [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946831, 'name': PowerOnVM_Task, 'duration_secs': 0.763281} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.045618] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1514.045975] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.046209] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1514.046471] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.046683] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1514.046890] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1514.047213] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1514.047445] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1514.047681] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1514.047909] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1514.048186] env[62405]: DEBUG nova.virt.hardware [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1514.048635] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1514.048899] env[62405]: INFO nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 9.62 seconds to spawn the instance on the hypervisor. [ 1514.049177] env[62405]: DEBUG nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1514.050754] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12289ac-0b7f-4ad4-9c60-2740b0e429de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.056048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31d6381-a38f-47a8-a29d-b68f5530b00e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.062962] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f8c6f99f-499f-4886-aae9-5f08969175f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1514.063223] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 262424b0-dc7d-4b6c-9539-2d6cd23a93da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1514.074879] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9714617c-795c-4b1c-b525-8ff92be8fa48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.117288] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946834, 'name': CreateVM_Task, 'duration_secs': 0.736594} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.117507] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.118245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.118416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.118763] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.119647] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70adb5ea-6497-4413-892e-4a5e12d4b939 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.126555] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1514.126555] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cb9e2d-ed53-268c-7ee2-3884f8dd0a77" [ 1514.126555] env[62405]: _type = "Task" [ 1514.126555] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.136117] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cb9e2d-ed53-268c-7ee2-3884f8dd0a77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.160156] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946835, 'name': ReconfigVM_Task, 'duration_secs': 0.188567} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.160651] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401291', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'name': 'volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3f9849b8-6aaa-4d32-b140-207d5b54d68f', 'attached_at': '', 'detached_at': '', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'serial': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1514.161526] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8cdc22f4-e3a4-4c1f-965c-24f388b91ce5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.189217] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1514.189217] env[62405]: value = "task-1946837" [ 1514.189217] env[62405]: _type = "Task" [ 1514.189217] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.198525] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946837, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.288677] env[62405]: DEBUG nova.compute.manager [req-19dc6008-e6db-49a6-88ca-6ec0aa9e3a86 req-86ede7a4-70ba-4e03-bba3-ed1de13b0cad service nova] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Received event network-vif-deleted-a6c201e5-eb87-434f-9c74-9f99937836fd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1514.352025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.372733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.454339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54cab6dd-c950-4797-858a-7108807100cb tempest-ServerExternalEventsTest-208808329 tempest-ServerExternalEventsTest-208808329-project] Releasing lock "refresh_cache-02abae6c-8962-49eb-8fa9-36b13a20eff1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.567115] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 65462c7a-372e-4ba6-8f6d-e300080d65d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1514.595434] env[62405]: INFO nova.compute.manager [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 20.90 seconds to build instance. [ 1514.643178] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cb9e2d-ed53-268c-7ee2-3884f8dd0a77, 'name': SearchDatastore_Task, 'duration_secs': 0.015289} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.643493] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.643682] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1514.643960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.644206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.644424] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1514.644841] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-493020a4-e7d7-4d7b-9c3c-eed0968340dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.661033] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1514.661033] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1514.662346] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09481ff8-8fc6-45dc-b26c-a8cd88d3b564 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.670783] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1514.670783] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520f6697-7cf8-466d-e51a-7a2a23ae8229" [ 1514.670783] env[62405]: _type = "Task" [ 1514.670783] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.680889] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520f6697-7cf8-466d-e51a-7a2a23ae8229, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.700259] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946837, 'name': Rename_Task, 'duration_secs': 0.195278} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.700572] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.700781] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4bbfe7a-f17a-449e-b8eb-f1fbfa4c15c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.710238] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1514.710238] env[62405]: value = "task-1946838" [ 1514.710238] env[62405]: _type = "Task" [ 1514.710238] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.727950] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946838, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.820239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.821087] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.821437] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.824018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.824018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.824463] env[62405]: INFO nova.compute.manager [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Terminating instance [ 1514.886994] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updated VIF entry in instance network info cache for port ec10e8ab-572c-4bfa-810d-befff7776996. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1514.887423] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating instance_info_cache with network_info: [{"id": "ec10e8ab-572c-4bfa-810d-befff7776996", "address": "fa:16:3e:6e:8c:bf", "network": {"id": "6406e850-662e-40f1-8855-1b2f61663441", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1543725241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f4c6a0483674c7286fb3edcb24f70d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec10e8ab-57", "ovs_interfaceid": "ec10e8ab-572c-4bfa-810d-befff7776996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1515.073486] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance fbedaa93-5968-4b42-b93e-201d2b44b32b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1515.073486] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 801e7086-5742-4a04-962c-7546284aa12d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1515.073486] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0491dc4b-cf35-4035-aca9-baf43b86af7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1515.100613] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ab54135d-533c-4f1b-9107-8de27026f313 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.411s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.193670] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520f6697-7cf8-466d-e51a-7a2a23ae8229, 'name': SearchDatastore_Task, 'duration_secs': 0.012573} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.194960] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c54726c-b62e-41a0-8e38-11c6fd1e83c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.204042] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1515.204042] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521112b7-e0b7-53f1-a445-187745266c16" [ 1515.204042] env[62405]: _type = "Task" [ 1515.204042] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.214212] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521112b7-e0b7-53f1-a445-187745266c16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.225018] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946838, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.329809] env[62405]: DEBUG nova.compute.manager [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1515.329809] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1515.330694] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd77f21d-3281-4a1b-88a0-1d6f85fc38ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.341114] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1515.341497] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aea6e05f-d36a-4f0b-8099-ec68ab398e47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.350361] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1515.350361] env[62405]: value = "task-1946839" [ 1515.350361] env[62405]: _type = "Task" [ 1515.350361] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.360955] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946839, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.390217] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Releasing lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.390476] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Received event network-vif-plugged-c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1515.390744] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.390856] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.391073] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.391261] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] No waiting events found dispatching network-vif-plugged-c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1515.391466] env[62405]: WARNING nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Received unexpected event network-vif-plugged-c1f4fd8a-cda2-4206-b706-58f6fa8c722e for instance with vm_state building and task_state spawning. [ 1515.391668] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Received event network-changed-acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1515.391883] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Refreshing instance network info cache due to event network-changed-acb33455-b824-40fd-99bd-4628778412a0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1515.391993] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquiring lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.392144] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquired lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.392447] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Refreshing network info cache for port acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1515.575619] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b21dc1e7-dacd-4154-9bc3-0fa3774695a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1515.575810] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3f9849b8-6aaa-4d32-b140-207d5b54d68f actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1515.602532] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1515.720326] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521112b7-e0b7-53f1-a445-187745266c16, 'name': SearchDatastore_Task, 'duration_secs': 0.024461} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.732956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.732956] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 792cd2c8-a67d-4b16-93ab-722fcc8b622d/792cd2c8-a67d-4b16-93ab-722fcc8b622d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1515.732956] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b25148cb-56aa-4739-bb03-5a0ad6f50eea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.744310] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946838, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.745675] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1515.745675] env[62405]: value = "task-1946840" [ 1515.745675] env[62405]: _type = "Task" [ 1515.745675] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.756108] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.863924] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946839, 'name': PowerOffVM_Task, 'duration_secs': 0.247331} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.864255] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1515.864428] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1515.864685] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c38b40d2-9784-459f-be46-5ba512939969 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.945036] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Successfully updated port: ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.957861] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1515.958204] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1515.958464] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Deleting the datastore file [datastore1] 02abae6c-8962-49eb-8fa9-36b13a20eff1 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1515.958989] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-039d93ff-9804-4762-b504-93db01f83a55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.970862] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for the task: (returnval){ [ 1515.970862] env[62405]: value = "task-1946843" [ 1515.970862] env[62405]: _type = "Task" [ 1515.970862] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.987935] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.081976] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ca4d11fe-1d0f-468b-a2f4-21c5b84342ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1516.135744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.238373] env[62405]: DEBUG oslo_vmware.api [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1946838, 'name': PowerOnVM_Task, 'duration_secs': 1.418503} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.238892] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1516.240484] env[62405]: INFO nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Took 7.18 seconds to spawn the instance on the hypervisor. [ 1516.240484] env[62405]: DEBUG nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1516.240745] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ccd19d-f8ca-472a-bee5-6f7b83e764f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.262151] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946840, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.413304] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updated VIF entry in instance network info cache for port acb33455-b824-40fd-99bd-4628778412a0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1516.413690] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updating instance_info_cache with network_info: [{"id": "acb33455-b824-40fd-99bd-4628778412a0", "address": "fa:16:3e:c6:b0:18", "network": {"id": "24a4e5e0-178e-4713-b3b3-db2044169596", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1947982707-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ac6737e7e8649d5a1061806cb927ed6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacb33455-b8", "ovs_interfaceid": "acb33455-b824-40fd-99bd-4628778412a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.449363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.449363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquired lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.449363] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1516.483024] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946843, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.584768] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b3647042-89a1-4d15-b85e-49a5c8def1d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1516.587980] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b8ff115b-64f1-4584-afa2-478c5e6b726b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.587980] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8995f9cb-8454-4a98-9090-290f87f8af18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.587980] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a1d35009-ea11-4e64-bbe4-604ed39d08f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.587980] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ca0aca02-4b99-4393-900c-b9cb0dad55c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.588352] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 02abae6c-8962-49eb-8fa9-36b13a20eff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.588352] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 058682a1-5240-4414-9203-c612ecd12999 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.588352] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration 80a37923-23e1-4b60-aaf2-72933f6694b9 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1516.769807] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619717} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.772536] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 792cd2c8-a67d-4b16-93ab-722fcc8b622d/792cd2c8-a67d-4b16-93ab-722fcc8b622d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1516.772766] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1516.773284] env[62405]: INFO nova.compute.manager [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Took 21.36 seconds to build instance. [ 1516.774161] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-476ee0c3-c926-49ec-b39e-8b784fe7d9b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.785470] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1516.785470] env[62405]: value = "task-1946844" [ 1516.785470] env[62405]: _type = "Task" [ 1516.785470] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.797101] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946844, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.918374] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Releasing lock "refresh_cache-7db1b086-942e-4890-8750-0d717e522786" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.918861] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Received event network-changed-c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1516.919092] env[62405]: DEBUG nova.compute.manager [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Refreshing instance network info cache due to event network-changed-c1f4fd8a-cda2-4206-b706-58f6fa8c722e. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1516.919324] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquiring lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.919469] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Acquired lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.919680] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Refreshing network info cache for port c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1516.982662] env[62405]: DEBUG oslo_vmware.api [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Task: {'id': task-1946843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528935} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.983041] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1516.983659] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1516.983659] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1516.983659] env[62405]: INFO nova.compute.manager [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1516.984127] env[62405]: DEBUG oslo.service.loopingcall [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.984688] env[62405]: DEBUG nova.compute.manager [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1516.984744] env[62405]: DEBUG nova.network.neutron [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1517.000603] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1517.089746] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance e8ed73c3-fb86-42c3-aae6-b0c8d03149ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1517.272630] env[62405]: DEBUG nova.compute.manager [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Received event network-vif-plugged-ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1517.272902] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Acquiring lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.273179] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.273397] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.273726] env[62405]: DEBUG nova.compute.manager [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] No waiting events found dispatching network-vif-plugged-ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1517.273792] env[62405]: WARNING nova.compute.manager [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Received unexpected event network-vif-plugged-ffc190ac-0f46-477c-bb7a-53327b5884bb for instance with vm_state building and task_state spawning. [ 1517.273943] env[62405]: DEBUG nova.compute.manager [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Received event network-changed-ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1517.274204] env[62405]: DEBUG nova.compute.manager [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Refreshing instance network info cache due to event network-changed-ffc190ac-0f46-477c-bb7a-53327b5884bb. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1517.274342] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Acquiring lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.275538] env[62405]: DEBUG nova.network.neutron [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Updating instance_info_cache with network_info: [{"id": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "address": "fa:16:3e:73:44:d7", "network": {"id": "1c3b400a-b627-4a1c-a4a8-87022944fdf8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-644457546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "25a20f4605e14a52bdf53052483b7e1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc190ac-0f", "ovs_interfaceid": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.277508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e791cc57-ae0f-4404-8e7f-e93fe6fdcaf7 tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.887s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.297360] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946844, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.285814} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.298132] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.299143] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b7733b-5173-43ce-9b16-26203db3a165 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.328899] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 792cd2c8-a67d-4b16-93ab-722fcc8b622d/792cd2c8-a67d-4b16-93ab-722fcc8b622d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1517.329690] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c540369-6747-42c0-ad89-081ce3f2f603 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.357730] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1517.357730] env[62405]: value = "task-1946845" [ 1517.357730] env[62405]: _type = "Task" [ 1517.357730] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.367349] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946845, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.593152] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 15218373-ffa5-49ce-b604-423b7fc5fb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1517.593374] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 792cd2c8-a67d-4b16-93ab-722fcc8b622d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.593699] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1517.593931] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1517.778975] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Releasing lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.779460] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Instance network_info: |[{"id": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "address": "fa:16:3e:73:44:d7", "network": {"id": "1c3b400a-b627-4a1c-a4a8-87022944fdf8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-644457546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "25a20f4605e14a52bdf53052483b7e1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc190ac-0f", "ovs_interfaceid": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1517.779803] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Acquired lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.780028] env[62405]: DEBUG nova.network.neutron [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Refreshing network info cache for port ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.781316] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 
tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:44:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73eeba7c-29e1-4fdf-82b3-d62e63e86051', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffc190ac-0f46-477c-bb7a-53327b5884bb', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.791017] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Creating folder: Project (25a20f4605e14a52bdf53052483b7e1a). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.792371] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1517.795819] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f563d0f8-f1b8-4cf6-acfe-0cc9ce74c56f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.800980] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updated VIF entry in instance network info cache for port c1f4fd8a-cda2-4206-b706-58f6fa8c722e. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1517.801310] env[62405]: DEBUG nova.network.neutron [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updating instance_info_cache with network_info: [{"id": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "address": "fa:16:3e:97:6f:79", "network": {"id": "5289661e-534d-40b0-8a99-75fc7c9da4f3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1070103644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2966f5756b424fbfbca6677e4d948a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f4fd8a-cd", "ovs_interfaceid": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.817207] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Created folder: Project (25a20f4605e14a52bdf53052483b7e1a) in parent group-v401284. [ 1517.818085] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Creating folder: Instances. Parent ref: group-v401339. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.818085] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1689a504-7d76-4eda-adcf-773ce689fa84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.831054] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Created folder: Instances in parent group-v401339. [ 1517.831364] env[62405]: DEBUG oslo.service.loopingcall [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.834485] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.834910] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf46fb19-df6d-4261-affa-73bcaaa20253 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.856735] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.856735] env[62405]: value = "task-1946848" [ 1517.856735] env[62405]: _type = "Task" [ 1517.856735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.874601] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946848, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.875726] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946845, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.944255] env[62405]: DEBUG nova.network.neutron [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.106110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "8995f9cb-8454-4a98-9090-290f87f8af18" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.106370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.106525] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.106714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.106883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1518.111271] env[62405]: INFO nova.compute.manager [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Terminating instance [ 1518.126202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3252fdbe-3ec4-487d-96b8-be7ae7c49eaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.138066] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a867237-89ee-47f0-973a-dfa002f2d8a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.171252] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb585a6c-5ee9-452f-9a62-ef034a38c745 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.180316] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41610575-00fd-41aa-9c1b-06327ac33ed7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.195761] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1518.303648] env[62405]: DEBUG oslo_concurrency.lockutils [req-32e615d3-9ccc-4a29-9f19-c5e76986e183 req-5939b222-ddd6-4b5d-899c-30285b07ff65 service nova] Releasing lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.316746] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.380173] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': 
task-1946845, 'name': ReconfigVM_Task, 'duration_secs': 0.733531} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.384031] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946848, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.384031] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 792cd2c8-a67d-4b16-93ab-722fcc8b622d/792cd2c8-a67d-4b16-93ab-722fcc8b622d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.384304] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e5101b2-060c-4942-80ae-0b95bcf2ed71 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.392385] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1518.392385] env[62405]: value = "task-1946849" [ 1518.392385] env[62405]: _type = "Task" [ 1518.392385] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.402892] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946849, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.448837] env[62405]: INFO nova.compute.manager [-] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Took 1.46 seconds to deallocate network for instance. [ 1518.620134] env[62405]: DEBUG nova.compute.manager [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1518.620134] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1518.621149] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69615d1-484e-471d-b7bc-93541067ebae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.635158] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.635678] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94fbef48-73cd-49a1-864d-3c3ce2445c7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.645211] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1518.645211] env[62405]: value = "task-1946850" [ 1518.645211] env[62405]: _type = "Task" [ 1518.645211] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.658761] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946850, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.741031] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 37 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1518.741031] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 37 to 38 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1518.741031] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1518.769166] env[62405]: DEBUG nova.network.neutron [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Updated VIF entry in instance network info cache for port ffc190ac-0f46-477c-bb7a-53327b5884bb. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.769219] env[62405]: DEBUG nova.network.neutron [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Updating instance_info_cache with network_info: [{"id": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "address": "fa:16:3e:73:44:d7", "network": {"id": "1c3b400a-b627-4a1c-a4a8-87022944fdf8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-644457546-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "25a20f4605e14a52bdf53052483b7e1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73eeba7c-29e1-4fdf-82b3-d62e63e86051", "external-id": "cl2-zone-659", "segmentation_id": 659, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffc190ac-0f", "ovs_interfaceid": "ffc190ac-0f46-477c-bb7a-53327b5884bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.871851] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946848, 'name': CreateVM_Task, 'duration_secs': 0.616134} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.872309] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.873544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.873928] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.874674] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.876081] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8126099d-d26c-4c69-99ea-2b4ef4647255 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
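Note: the network_info payloads logged by update_instance_cache_with_nw_info above are plain JSON, so they can be pulled apart offline when a cache refresh looks wrong. A minimal, hypothetical sketch (standard library only; the sample is trimmed from the entry for port ffc190ac-0f46-477c-bb7a-53327b5884bb and is not the full structure):

import json

# Trimmed sample of one VIF entry, in the shape shown in the log above (assumed subset of keys).
sample = '''[{"id": "ffc190ac-0f46-477c-bb7a-53327b5884bb",
              "address": "fa:16:3e:73:44:d7",
              "network": {"label": "tempest-AttachInterfacesV270Test-644457546-network",
                          "subnets": [{"cidr": "192.168.128.0/28",
                                       "ips": [{"address": "192.168.128.2", "type": "fixed"}]}]},
              "details": {"segmentation_id": 659},
              "devname": "tapffc190ac-0f",
              "active": true}]'''

for vif in json.loads(sample):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    # Summarise the fields usually needed when comparing the cache against Neutron.
    print(f'{vif["id"]} dev={vif["devname"]} mac={vif["address"]} '
          f'vlan={vif["details"]["segmentation_id"]} ips={ips} active={vif["active"]}')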
[ 1518.885031] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1518.885031] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526f4a01-bb4f-fe66-a526-d206b635f18b" [ 1518.885031] env[62405]: _type = "Task" [ 1518.885031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.893447] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526f4a01-bb4f-fe66-a526-d206b635f18b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.902578] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946849, 'name': Rename_Task, 'duration_secs': 0.240944} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.903162] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1518.905388] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e00eb103-53da-4c44-ad3a-80da03277990 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.917251] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1518.917251] env[62405]: value = "task-1946851" [ 1518.917251] env[62405]: _type = "Task" [ 1518.917251] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.932677] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946851, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.956068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.159293] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946850, 'name': PowerOffVM_Task, 'duration_secs': 0.282601} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.159293] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1519.159293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1519.159689] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-094fcca9-be54-401f-ae56-87b9c2bb034f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.245203] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1519.245480] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.255s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.245758] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.872s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.246187] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.249207] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 
tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.309s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.249207] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.250140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.140s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.252238] env[62405]: INFO nova.compute.claims [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.273193] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d0273c4-2e2d-4ab1-9f13-d39351c07365 req-40096379-b664-4302-ae83-68aa71c9c8b0 service nova] Releasing lock "refresh_cache-0eec4a5f-9f9b-4a86-a046-2e2d107adc48" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.292771] env[62405]: INFO nova.scheduler.client.report [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance 8624629d-642a-4adf-984e-3925beeb4fef [ 1519.301444] env[62405]: INFO nova.scheduler.client.report [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Deleted allocations for instance 2257c786-54f9-441a-832c-cf3178bfcc78 [ 1519.382709] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1519.383013] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1519.383221] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleting the datastore file [datastore1] 8995f9cb-8454-4a98-9090-290f87f8af18 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1519.383533] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82fca9bb-da91-453b-a4c0-1e9a865c3db9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.397474] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526f4a01-bb4f-fe66-a526-d206b635f18b, 'name': SearchDatastore_Task, 'duration_secs': 0.013356} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.399250] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.399521] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1519.399774] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.399947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.400514] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1519.400921] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1519.400921] env[62405]: value = "task-1946853" [ 1519.400921] env[62405]: _type = "Task" [ 1519.400921] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.401708] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a70baae-7c33-4db9-af82-3f6c4dcb6f1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.415948] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946853, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.421172] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1519.421172] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1519.425207] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c236ae93-321f-47f4-b09f-6e1b3ed4f571 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.432215] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1519.432215] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b9235e-06e9-6ed7-4c0f-dee415a5db70" [ 1519.432215] env[62405]: _type = "Task" [ 1519.432215] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.436230] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946851, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.448520] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b9235e-06e9-6ed7-4c0f-dee415a5db70, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.449847] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0760d04c-a0ce-427f-baa8-0dfda98c853b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.459183] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1519.459183] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e4e981-7eff-ce21-cb3d-efe74b837732" [ 1519.459183] env[62405]: _type = "Task" [ 1519.459183] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.470206] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e4e981-7eff-ce21-cb3d-efe74b837732, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.802956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7eb37233-64e1-4fca-802f-e3edb3d5e25d tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "8624629d-642a-4adf-984e-3925beeb4fef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.101s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.815267] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca3b109f-8056-46fe-bbbd-a9b0e2e826e1 tempest-ServerDiagnosticsV248Test-1358286518 tempest-ServerDiagnosticsV248Test-1358286518-project-member] Lock "2257c786-54f9-441a-832c-cf3178bfcc78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.772s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.914854] env[62405]: DEBUG oslo_vmware.api [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211745} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.915306] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.915641] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.915732] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.915878] env[62405]: INFO nova.compute.manager [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1519.916132] env[62405]: DEBUG oslo.service.loopingcall [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.916329] env[62405]: DEBUG nova.compute.manager [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1519.916423] env[62405]: DEBUG nova.network.neutron [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.931370] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946851, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.973448] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e4e981-7eff-ce21-cb3d-efe74b837732, 'name': SearchDatastore_Task, 'duration_secs': 0.013224} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.973724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.974019] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0eec4a5f-9f9b-4a86-a046-2e2d107adc48/0eec4a5f-9f9b-4a86-a046-2e2d107adc48.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.974295] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cb95a82-edbe-4aa9-a63b-c3a4e3c08d72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.983781] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1519.983781] env[62405]: value = "task-1946854" [ 1519.983781] env[62405]: _type = "Task" [ 1519.983781] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.996382] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.445648] env[62405]: DEBUG oslo_vmware.api [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1946851, 'name': PowerOnVM_Task, 'duration_secs': 1.242939} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.446540] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1520.447014] env[62405]: INFO nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Took 10.56 seconds to spawn the instance on the hypervisor. 
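Note: most entries above follow the oslo.vmware task lifecycle: a *_Task method is invoked, wait_for_task polls it (the "progress is 0% / 66%" lines), and completion is reported with duration_secs. The loop below is a hedged, standard-library illustration of that poll-until-done pattern, not oslo.vmware's actual wait_for_task implementation; FakeTask and the 0.5 s interval are invented for the example:

import time

class FakeTask:
    """Stand-in for a vCenter task handle; advances a little on every poll."""
    def __init__(self):
        self.progress = 0
        self.state = "running"

    def poll(self):
        self.progress = min(self.progress + 33, 100)
        if self.progress == 100:
            self.state = "success"
        return self.state, self.progress

def wait_for_task(task, interval=0.5):
    """Poll until the task reaches a terminal state, logging progress like the entries above."""
    started = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%.")
        if state == "success":
            print(f"completed successfully, duration_secs={time.monotonic() - started:.6f}")
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)

wait_for_task(FakeTask())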
[ 1520.447014] env[62405]: DEBUG nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1520.447983] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f89b36-3393-46e1-b57a-ec734d89a5a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.497342] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.743533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876cea02-af8c-4856-84e2-48f78bd3ed6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.754700] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c88a06-4fa5-4bae-9fd1-be9d0d19890b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.793065] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba8a5cf-c24b-4fbc-8e0d-da61924d16a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.805034] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843f0936-35ac-4401-a034-30725933cd65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.822473] env[62405]: DEBUG nova.compute.provider_tree [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1520.978180] env[62405]: INFO nova.compute.manager [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Took 21.41 seconds to build instance. [ 1520.999558] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946854, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.006322} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.999558] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0eec4a5f-9f9b-4a86-a046-2e2d107adc48/0eec4a5f-9f9b-4a86-a046-2e2d107adc48.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.999558] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1521.000320] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e34e84da-ca6f-41bc-9842-045a3dd64d28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.009742] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1521.009742] env[62405]: value = "task-1946855" [ 1521.009742] env[62405]: _type = "Task" [ 1521.009742] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.019869] env[62405]: DEBUG nova.network.neutron [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.029346] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946855, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.326495] env[62405]: DEBUG nova.scheduler.client.report [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1521.480311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7ca307f-c246-487e-b437-2761ca25971f tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.523991] env[62405]: INFO nova.compute.manager [-] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Took 1.61 seconds to deallocate network for instance. [ 1521.524600] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.263988} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.527052] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.528186] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527ef56a-0bd5-4f12-83fd-9524ce47a076 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.559440] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 0eec4a5f-9f9b-4a86-a046-2e2d107adc48/0eec4a5f-9f9b-4a86-a046-2e2d107adc48.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.560035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3ab782b-0124-481e-abff-eaf96b3de5e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.588263] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1521.588263] env[62405]: value = "task-1946856" [ 1521.588263] env[62405]: _type = "Task" [ 1521.588263] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.601225] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946856, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.829341] env[62405]: DEBUG nova.compute.manager [req-be5e509c-e971-48b6-89de-08c5dabbfbe5 req-4919f165-d61e-43f7-99f2-f6bea12bc21e service nova] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Received event network-vif-deleted-77f01bbe-48b5-4ad3-b215-90ff9d429d0b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1521.833631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.834430] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1521.838521] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.360s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.839991] env[62405]: INFO nova.compute.claims [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1521.947775] env[62405]: DEBUG nova.compute.manager [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Received event network-changed-ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1521.948166] env[62405]: DEBUG nova.compute.manager [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Refreshing instance network info cache due to event network-changed-ec10e8ab-572c-4bfa-810d-befff7776996. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1521.948388] env[62405]: DEBUG oslo_concurrency.lockutils [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] Acquiring lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.948622] env[62405]: DEBUG oslo_concurrency.lockutils [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] Acquired lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.948622] env[62405]: DEBUG nova.network.neutron [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Refreshing network info cache for port ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1521.984156] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1522.037148] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.100099] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946856, 'name': ReconfigVM_Task, 'duration_secs': 0.309629} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.100938] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 0eec4a5f-9f9b-4a86-a046-2e2d107adc48/0eec4a5f-9f9b-4a86-a046-2e2d107adc48.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.101744] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccb2ede1-45e9-4a42-b31c-2812d84e6412 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.110518] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1522.110518] env[62405]: value = "task-1946857" [ 1522.110518] env[62405]: _type = "Task" [ 1522.110518] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.121214] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946857, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.347685] env[62405]: DEBUG nova.compute.utils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.351441] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1522.353016] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1522.456370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.456496] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.511342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.550208] env[62405]: DEBUG nova.policy [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41b79ac0838e4c0198236033d43199db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5178d8cc12f46f3a8599384d4be9b6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1522.626200] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946857, 'name': Rename_Task, 'duration_secs': 0.265866} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.626869] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.627644] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c8fa183-a79d-46d5-a30a-088406af563a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.638521] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1522.638521] env[62405]: value = "task-1946858" [ 1522.638521] env[62405]: _type = "Task" [ 1522.638521] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.648505] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.856952] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1523.094087] env[62405]: DEBUG nova.network.neutron [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updated VIF entry in instance network info cache for port ec10e8ab-572c-4bfa-810d-befff7776996. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1523.094454] env[62405]: DEBUG nova.network.neutron [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating instance_info_cache with network_info: [{"id": "ec10e8ab-572c-4bfa-810d-befff7776996", "address": "fa:16:3e:6e:8c:bf", "network": {"id": "6406e850-662e-40f1-8855-1b2f61663441", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1543725241-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f4c6a0483674c7286fb3edcb24f70d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec10e8ab-57", "ovs_interfaceid": "ec10e8ab-572c-4bfa-810d-befff7776996", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.149203] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946858, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.248266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.248266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.271784] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Successfully created port: 2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1523.314390] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de8bd60-4201-47e1-988c-8a842421bcf7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.323136] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c6bf8e-8441-49d0-9a53-2c1ffcc6d092 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.356277] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da11eddb-df53-4b1f-bfae-322b2dc95c7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.364944] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6c5fb1-c47d-4bfe-b0c3-58eff2116d69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.383772] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1523.602134] env[62405]: DEBUG oslo_concurrency.lockutils [req-579b9272-5b85-450d-ac43-ceec2bcf4f25 req-bf792509-b930-4262-a333-4eb2d0444271 service nova] Releasing lock "refresh_cache-3f9849b8-6aaa-4d32-b140-207d5b54d68f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.650874] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946858, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.874203] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1523.905042] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.905146] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.908753] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1523.908753] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1523.909274] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1523.909274] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1523.909274] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 
tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1523.910277] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1523.910277] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1523.910277] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1523.910277] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1523.910277] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1523.910717] env[62405]: DEBUG nova.virt.hardware [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1523.911681] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ffffcb-6478-451f-9b05-a6ecd03111c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.915416] env[62405]: ERROR nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [req-ffb9f9c7-ad15-4e67-8cb4-ec3d94f7dd39] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ffb9f9c7-ad15-4e67-8cb4-ec3d94f7dd39"}]} [ 1523.924323] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a6dff1-cb49-4615-9480-72fac81b642c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.942123] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1523.958580] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1523.958934] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1523.973388] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: 74ea4ac1-15b8-4431-8ae2-c20c058298fa {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1523.997492] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1524.151878] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 
tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946858, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.412986] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0ad4a2-322f-4c92-9864-b276e3f150a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.422221] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62670590-db6c-4dc6-878c-3bd33fffeb54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.463576] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18100fc9-b1ff-4a58-9236-8d8da4bc8b37 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.477547] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa511d2-3e60-4dec-a63c-f914e89a1610 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.494514] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1524.628611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.628955] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.659315] env[62405]: DEBUG oslo_vmware.api [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946858, 'name': PowerOnVM_Task, 'duration_secs': 1.519313} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.659315] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1524.659315] env[62405]: INFO nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Took 10.65 seconds to spawn the instance on the hypervisor. [ 1524.659315] env[62405]: DEBUG nova.compute.manager [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1524.659875] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0247b2-58d6-4184-a76a-ae9e93f09315 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.865513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.865762] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.018271] env[62405]: ERROR nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [req-5999dee2-1e0b-46be-809e-aed69f21a1d6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5999dee2-1e0b-46be-809e-aed69f21a1d6"}]} [ 1525.036307] env[62405]: DEBUG nova.compute.manager [req-d6690bf0-0ecf-4384-8264-13693e62c83e req-6330fc9a-9a4e-4039-af71-89ab153403d7 service nova] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Received event network-vif-deleted-4582fcd2-4721-4ad7-9452-5b808488dcb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1525.038756] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1525.054747] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1525.055196] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1525.069033] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1525.089117] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1525.184832] env[62405]: INFO nova.compute.manager [None 
req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Took 25.08 seconds to build instance. [ 1525.486010] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Successfully updated port: 2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1525.643432] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628e51d6-ec12-4225-aa58-f4fa382d6f4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.654891] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623f29ea-646a-4bfc-88ac-bccd75c3356f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.690715] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cdecf0-87bb-472e-beb6-d51337ef209a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.693274] env[62405]: DEBUG oslo_concurrency.lockutils [None req-481a0869-eade-4078-a5f3-837e6177afe0 tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.979s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.700574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc01446e-c850-401e-bb06-9f36f7b53021 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.720120] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1525.989479] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.989577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock 
"refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.989764] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1526.196541] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1526.254033] env[62405]: DEBUG nova.scheduler.client.report [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 44 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1526.254312] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 44 to 45 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1526.254493] env[62405]: DEBUG nova.compute.provider_tree [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1526.446386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "interface-0eec4a5f-9f9b-4a86-a046-2e2d107adc48-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.446684] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock 
"interface-0eec4a5f-9f9b-4a86-a046-2e2d107adc48-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.447430] env[62405]: DEBUG nova.objects.instance [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lazy-loading 'flavor' on Instance uuid 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1526.566940] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1526.733957] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.761121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.923s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.761984] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1526.767068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.457s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.924515] env[62405]: DEBUG nova.network.neutron [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Updating instance_info_cache with network_info: [{"id": "2ced9062-28c5-4183-a8d8-397cd40c9130", "address": "fa:16:3e:21:dc:a3", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ced9062-28", "ovs_interfaceid": "2ced9062-28c5-4183-a8d8-397cd40c9130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.951377] env[62405]: DEBUG nova.objects.instance [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lazy-loading 'pci_requests' on Instance uuid 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.275286] env[62405]: INFO nova.compute.claims [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1527.279698] env[62405]: DEBUG nova.compute.utils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1527.281109] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1527.281349] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1527.378144] env[62405]: DEBUG nova.policy [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b37af23c7fe64c42a8fcaa4faf206f91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73ff3070616c43658c616adb113aa50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1527.427567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.428085] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Instance network_info: |[{"id": "2ced9062-28c5-4183-a8d8-397cd40c9130", "address": "fa:16:3e:21:dc:a3", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ced9062-28", "ovs_interfaceid": "2ced9062-28c5-4183-a8d8-397cd40c9130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1527.428845] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:dc:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '2ced9062-28c5-4183-a8d8-397cd40c9130', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1527.438055] env[62405]: DEBUG oslo.service.loopingcall [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.438363] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1527.438649] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-336d6cd1-a9e5-4f89-8683-006ff0cca071 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.456217] env[62405]: DEBUG nova.objects.base [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Object Instance<0eec4a5f-9f9b-4a86-a046-2e2d107adc48> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1527.456441] env[62405]: DEBUG nova.network.neutron [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1527.462031] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1527.462031] env[62405]: value = "task-1946859" [ 1527.462031] env[62405]: _type = "Task" [ 1527.462031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.472685] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946859, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.682016] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15791b14-6699-44c1-9737-4b1f7e10a43a tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "interface-0eec4a5f-9f9b-4a86-a046-2e2d107adc48-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.235s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.785026] env[62405]: INFO nova.compute.resource_tracker [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating resource usage from migration 80a37923-23e1-4b60-aaf2-72933f6694b9 [ 1527.788279] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1527.974338] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946859, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.160717] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Successfully created port: 19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.322647] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c06f2c-6099-4b08-829f-64b6dacf91b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.333048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8363fc-c35a-4e83-b3cf-d3158210d276 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.367021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a971427d-7a8d-48db-8091-9e244b3e99b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.375026] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0fa95a-7a7e-451e-b055-72d9b8a003d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.389197] env[62405]: DEBUG nova.compute.provider_tree [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.474884] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946859, 'name': CreateVM_Task, 'duration_secs': 0.681043} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.475068] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1528.475759] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.475919] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.476265] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1528.476505] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96d67e40-e8fb-4691-a3cd-fd5dee32f6f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.481264] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1528.481264] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e694f8-b0da-d976-a47e-93c8508dd3a5" [ 1528.481264] env[62405]: _type = "Task" [ 1528.481264] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.490451] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e694f8-b0da-d976-a47e-93c8508dd3a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.612083] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Received event network-vif-plugged-2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1528.612292] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Acquiring lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.612392] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.612544] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.612703] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] No waiting events found dispatching network-vif-plugged-2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1528.612866] env[62405]: WARNING nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Received unexpected event network-vif-plugged-2ced9062-28c5-4183-a8d8-397cd40c9130 for instance with vm_state building and task_state spawning. [ 1528.613068] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Received event network-changed-c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1528.613261] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Refreshing instance network info cache due to event network-changed-c1f4fd8a-cda2-4206-b706-58f6fa8c722e. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1528.613442] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Acquiring lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.613576] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Acquired lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.613726] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Refreshing network info cache for port c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1528.801093] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1528.833483] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1528.833740] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1528.833896] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1528.834119] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1528.834392] env[62405]: DEBUG nova.virt.hardware [None 
req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1528.834584] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1528.835091] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1528.835546] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1528.835546] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1528.835824] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1528.838757] env[62405]: DEBUG nova.virt.hardware [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1528.838757] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ce31b4-7588-42ac-a4e8-e0c5df4b4581 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.850476] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393e9226-eba6-483f-a197-7a67c163b311 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.891947] env[62405]: DEBUG nova.scheduler.client.report [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1529.018123] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e694f8-b0da-d976-a47e-93c8508dd3a5, 'name': SearchDatastore_Task, 'duration_secs': 0.016847} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.018123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.018123] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1529.018123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.018329] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.018329] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1529.018329] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92c49441-9c01-4503-926e-2ad447e4d863 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.018329] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1529.018329] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1529.027601] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0086940-fafd-4402-bf28-3e1dc33e7823 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.029095] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1529.029095] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ec5b02-d939-6025-89d1-2075203686cc" [ 1529.029095] env[62405]: _type = "Task" [ 1529.029095] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.041018] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ec5b02-d939-6025-89d1-2075203686cc, 'name': SearchDatastore_Task, 'duration_secs': 0.012994} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.042728] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75bc037e-3636-478e-8560-7c491ff975a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.050452] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1529.050452] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230a0fb-1aa5-5ff9-8224-00d412c6ef02" [ 1529.050452] env[62405]: _type = "Task" [ 1529.050452] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.070018] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230a0fb-1aa5-5ff9-8224-00d412c6ef02, 'name': SearchDatastore_Task, 'duration_secs': 0.010944} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.070018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.070018] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4/b3647042-89a1-4d15-b85e-49a5c8def1d4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1529.070018] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0a6a720-a7d4-4a4c-903a-459e9a4dd36f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.083016] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1529.083016] env[62405]: value = "task-1946860" [ 1529.083016] env[62405]: _type = "Task" [ 1529.083016] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.095962] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946860, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.397593] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.631s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.397869] env[62405]: INFO nova.compute.manager [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Migrating [ 1529.398249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.398399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.399911] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.337s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.400122] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.402518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.552s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.404692] env[62405]: INFO nova.compute.claims [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1529.408859] env[62405]: INFO nova.compute.rpcapi [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1529.409381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock 
"compute-rpcapi-router" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.454335] env[62405]: INFO nova.scheduler.client.report [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Deleted allocations for instance 3c0b964f-c900-4704-ae12-7eba7952f678 [ 1529.564914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "6199de01-baca-4461-9572-111eda11adac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.565347] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.565720] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "6199de01-baca-4461-9572-111eda11adac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.565939] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.566139] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.568625] env[62405]: INFO nova.compute.manager [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Terminating instance [ 1529.604782] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946860, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.722467] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updated VIF entry in instance network info cache for port c1f4fd8a-cda2-4206-b706-58f6fa8c722e. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.722467] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updating instance_info_cache with network_info: [{"id": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "address": "fa:16:3e:97:6f:79", "network": {"id": "5289661e-534d-40b0-8a99-75fc7c9da4f3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1070103644-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fe2966f5756b424fbfbca6677e4d948a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9e6b7d9f-c4e9-4623-9eb5-840ca1a8224c", "external-id": "nsx-vlan-transportzone-782", "segmentation_id": 782, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1f4fd8a-cd", "ovs_interfaceid": "c1f4fd8a-cda2-4206-b706-58f6fa8c722e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.933891] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.934217] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.934321] env[62405]: DEBUG nova.network.neutron [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.966744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7bb24fb1-ca55-4d29-81b1-3d8c80e2f6fe tempest-TenantUsagesTestJSON-546661799 tempest-TenantUsagesTestJSON-546661799-project-member] Lock "3c0b964f-c900-4704-ae12-7eba7952f678" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.955s 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.073537] env[62405]: DEBUG nova.compute.manager [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1530.073757] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.074844] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a8131f-baff-4cd7-bedf-023b32148e83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.083479] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.083726] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12e23975-63fc-4178-9e9d-f87a3adea459 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.094740] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946860, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598152} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.096600] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4/b3647042-89a1-4d15-b85e-49a5c8def1d4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1530.097018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1530.097146] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1530.097146] env[62405]: value = "task-1946861" [ 1530.097146] env[62405]: _type = "Task" [ 1530.097146] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.097325] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb82fee5-b13d-4438-b730-935356d90ca3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.111827] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946861, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.114052] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1530.114052] env[62405]: value = "task-1946862" [ 1530.114052] env[62405]: _type = "Task" [ 1530.114052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.124754] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.225622] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Releasing lock "refresh_cache-792cd2c8-a67d-4b16-93ab-722fcc8b622d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.225918] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Received event network-changed-2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1530.226074] env[62405]: DEBUG nova.compute.manager [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Refreshing instance network info cache due to event network-changed-2ced9062-28c5-4183-a8d8-397cd40c9130. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1530.226300] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Acquiring lock "refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.226444] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Acquired lock "refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.226613] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Refreshing network info cache for port 2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.356572] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.356798] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.356968] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.357221] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.357362] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.362016] env[62405]: INFO nova.compute.manager [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe 
tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Terminating instance [ 1530.439187] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Successfully updated port: 19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.617932] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946861, 'name': PowerOffVM_Task, 'duration_secs': 0.212572} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.622070] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.622338] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.623022] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e75c6ecf-0361-4deb-9780-031c37b9dea1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.636036] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088482} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.636330] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1530.637363] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c67d01-0bba-45dc-a0dc-11b0e30a617c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.670988] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4/b3647042-89a1-4d15-b85e-49a5c8def1d4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1530.677273] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4736d713-4ff1-461c-b834-c4440c0c08ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.702320] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1530.702320] env[62405]: value = "task-1946864" [ 1530.702320] env[62405]: _type = "Task" [ 1530.702320] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.714204] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946864, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.746546] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1530.746884] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1530.747180] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Deleting the datastore file [datastore1] 6199de01-baca-4461-9572-111eda11adac {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1530.747663] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3743b2dc-7dd3-4039-9df8-0b00c4bb501a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.756890] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for the task: (returnval){ [ 1530.756890] env[62405]: value = "task-1946865" [ 1530.756890] env[62405]: _type = "Task" [ 1530.756890] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.767054] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946865, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.866295] env[62405]: DEBUG nova.compute.manager [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1530.866516] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.867413] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fa807c-14e5-4250-9ec1-2a16ea2bb355 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.880969] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.881715] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff7ba4b4-b239-4040-9490-4bdc0809fe50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.892304] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1530.892304] env[62405]: value = "task-1946866" [ 1530.892304] env[62405]: _type = "Task" [ 1530.892304] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.902847] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946866, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.944140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.944140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquired lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.944140] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.003915] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fc6d09-71fe-44fd-ab02-c9b220a9a903 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.018758] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b8350-6e72-4db5-b4bb-ddb2bf56d7b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.807411] env[62405]: DEBUG nova.network.neutron [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.816340] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0894f2f-8cfc-4f80-be13-efe3f6720b28 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.819480] env[62405]: DEBUG nova.compute.manager [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Received event network-vif-plugged-19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1531.819683] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Acquiring lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.819885] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.820182] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.820237] env[62405]: DEBUG nova.compute.manager [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] No waiting events found dispatching network-vif-plugged-19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1531.820417] env[62405]: WARNING nova.compute.manager [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Received unexpected event network-vif-plugged-19c7164f-ee95-4382-907d-6f3d78608802 for instance with vm_state building and task_state spawning. [ 1531.821764] env[62405]: DEBUG nova.compute.manager [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Received event network-changed-19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1531.821764] env[62405]: DEBUG nova.compute.manager [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Refreshing instance network info cache due to event network-changed-19c7164f-ee95-4382-907d-6f3d78608802. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1531.821764] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Acquiring lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.827601] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946864, 'name': ReconfigVM_Task, 'duration_secs': 0.333175} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.835795] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Reconfigured VM instance instance-00000013 to attach disk [datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4/b3647042-89a1-4d15-b85e-49a5c8def1d4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1531.836997] env[62405]: DEBUG oslo_vmware.api [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Task: {'id': task-1946865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210811} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.837295] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946866, 'name': PowerOffVM_Task, 'duration_secs': 0.263205} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.837505] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d60115ec-bffd-4646-b8aa-32adfa4a5e68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.839214] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.839415] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.839634] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.839831] env[62405]: INFO nova.compute.manager [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] [instance: 6199de01-baca-4461-9572-111eda11adac] Took 1.77 seconds to destroy the instance on the hypervisor. [ 1531.840095] env[62405]: DEBUG oslo.service.loopingcall [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.841264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90d3b5f-1984-434d-aec3-9e48de707f9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.845287] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1531.845477] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.846082] env[62405]: DEBUG nova.compute.manager [-] [instance: 6199de01-baca-4461-9572-111eda11adac] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1531.846082] env[62405]: DEBUG nova.network.neutron [-] [instance: 6199de01-baca-4461-9572-111eda11adac] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.847858] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d79a9f0-63e7-453a-b29e-6c8d306ac723 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.860609] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1531.865332] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1531.865332] env[62405]: value = "task-1946867" [ 1531.865332] env[62405]: _type = "Task" [ 1531.865332] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.876628] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946867, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.939341] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.970724] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.971023] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.971858] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Deleting the datastore file [datastore1] 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.971858] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f696e439-7539-4d10-b102-2482e35b8124 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.981015] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for the task: (returnval){ [ 1531.981015] env[62405]: value = "task-1946869" [ 1531.981015] env[62405]: _type = "Task" [ 1531.981015] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.995312] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.323707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.380282] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946867, 'name': Rename_Task, 'duration_secs': 0.168853} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.380787] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1532.381892] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcb13859-7e3a-460e-a6d8-f1f98a2991cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.396289] env[62405]: ERROR nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [req-1ac7b23a-224b-40f2-9945-c34bf0452b5b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1ac7b23a-224b-40f2-9945-c34bf0452b5b"}]} [ 1532.400333] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1532.400333] env[62405]: value = "task-1946870" [ 1532.400333] env[62405]: _type = "Task" [ 1532.400333] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.409266] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946870, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.417772] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1532.432426] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1532.432565] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1532.445713] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1532.472626] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1532.494402] env[62405]: DEBUG oslo_vmware.api [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Task: {'id': task-1946869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158879} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.494687] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.494868] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.495052] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.495226] env[62405]: INFO nova.compute.manager [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1532.495574] env[62405]: DEBUG oslo.service.loopingcall [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.495735] env[62405]: DEBUG nova.compute.manager [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1532.495828] env[62405]: DEBUG nova.network.neutron [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.739014] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Updated VIF entry in instance network info cache for port 2ced9062-28c5-4183-a8d8-397cd40c9130. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1532.739453] env[62405]: DEBUG nova.network.neutron [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Updating instance_info_cache with network_info: [{"id": "2ced9062-28c5-4183-a8d8-397cd40c9130", "address": "fa:16:3e:21:dc:a3", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ced9062-28", "ovs_interfaceid": "2ced9062-28c5-4183-a8d8-397cd40c9130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.768926] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.769689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.793510] env[62405]: DEBUG nova.network.neutron [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Updating instance_info_cache with network_info: [{"id": "19c7164f-ee95-4382-907d-6f3d78608802", "address": "fa:16:3e:1a:f2:11", "network": {"id": "b7d514ef-30bf-4d0c-b3fb-5eb207a7ce7d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-370465885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73ff3070616c43658c616adb113aa50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19c7164f-ee", "ovs_interfaceid": "19c7164f-ee95-4382-907d-6f3d78608802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.911434] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946870, 'name': PowerOnVM_Task} progress is 37%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.960997] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee27620-db5d-4622-a185-6d86a3e478f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.971849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c28331c-65ae-471e-8e24-34c11229fdc4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.010189] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e3f99d-b3a2-4db9-b7eb-f51b184d0b03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.019440] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6941fb12-a6b0-4dd4-980b-13f95711f522 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.034841] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.243036] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d07aea6-099f-433c-97d2-147d61ae1304 req-d88b37d6-775c-4358-9000-0828ad2d9ce9 service nova] Releasing lock "refresh_cache-b3647042-89a1-4d15-b85e-49a5c8def1d4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.299693] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Releasing lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.300074] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 
tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Instance network_info: |[{"id": "19c7164f-ee95-4382-907d-6f3d78608802", "address": "fa:16:3e:1a:f2:11", "network": {"id": "b7d514ef-30bf-4d0c-b3fb-5eb207a7ce7d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-370465885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73ff3070616c43658c616adb113aa50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19c7164f-ee", "ovs_interfaceid": "19c7164f-ee95-4382-907d-6f3d78608802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1533.303235] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Acquired lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.303406] env[62405]: DEBUG nova.network.neutron [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Refreshing network info cache for port 19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1533.308165] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:f2:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e350f83a-f581-4e10-ac16-0b0f7bfd3d38', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19c7164f-ee95-4382-907d-6f3d78608802', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1533.319680] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Creating folder: Project (73ff3070616c43658c616adb113aa50a). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1533.321235] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-768235f0-c9ec-4f2a-88a5-44693901ca53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.337659] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Created folder: Project (73ff3070616c43658c616adb113aa50a) in parent group-v401284. [ 1533.338560] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Creating folder: Instances. Parent ref: group-v401343. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1533.339279] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3de16108-b019-4dd1-8f5c-1213d6903223 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.357950] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Created folder: Instances in parent group-v401343. [ 1533.357950] env[62405]: DEBUG oslo.service.loopingcall [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1533.357950] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1533.357950] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01c30c8a-a5f2-43b6-ae34-6cbe28f142f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.387562] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1533.387562] env[62405]: value = "task-1946873" [ 1533.387562] env[62405]: _type = "Task" [ 1533.387562] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.398943] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946873, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.410688] env[62405]: DEBUG oslo_vmware.api [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946870, 'name': PowerOnVM_Task, 'duration_secs': 0.984898} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.410997] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.411293] env[62405]: INFO nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Took 9.54 seconds to spawn the instance on the hypervisor. [ 1533.411488] env[62405]: DEBUG nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1533.412438] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7895c0-ad46-4e11-86b4-515d21835f45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.478438] env[62405]: DEBUG nova.network.neutron [-] [instance: 6199de01-baca-4461-9572-111eda11adac] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.560767] env[62405]: ERROR nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [req-3676928f-996f-4d01-8d62-e2f1af83816f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3676928f-996f-4d01-8d62-e2f1af83816f"}]} [ 1533.577619] env[62405]: DEBUG nova.network.neutron [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.587761] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1533.610252] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1533.610315] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.625834] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1533.650468] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1533.845139] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f961be4e-24b2-4b3e-bf92-9b5770facde3 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.877064] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1533.897505] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946873, 'name': CreateVM_Task, 'duration_secs': 0.399003} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.897679] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.898408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.898569] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.899159] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.899460] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1e24e95-08ae-4353-8094-d99f21fa887e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.905087] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1533.905087] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a95220-2381-9d51-eb12-aebdcf586a1d" [ 1533.905087] env[62405]: _type = "Task" [ 1533.905087] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.917476] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a95220-2381-9d51-eb12-aebdcf586a1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.920841] env[62405]: DEBUG nova.compute.manager [req-48de9b3e-9411-4b3a-9e3a-7f5a9f8e6ca4 req-93356c8a-3a26-472f-b2c4-8cb427a1c3d6 service nova] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Received event network-vif-deleted-ffc190ac-0f46-477c-bb7a-53327b5884bb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1533.933394] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.933627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.937476] env[62405]: INFO nova.compute.manager [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Took 27.86 seconds to build instance. [ 1533.980881] env[62405]: INFO nova.compute.manager [-] [instance: 6199de01-baca-4461-9572-111eda11adac] Took 2.13 seconds to deallocate network for instance. [ 1534.085022] env[62405]: INFO nova.compute.manager [-] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Took 1.59 seconds to deallocate network for instance. [ 1534.088281] env[62405]: DEBUG nova.compute.manager [req-7a9353e4-48e1-4f72-9607-1ff17b706c43 req-2593744d-fe23-4307-94f9-08825e522637 service nova] [instance: 6199de01-baca-4461-9572-111eda11adac] Received event network-vif-deleted-9cfd8f38-b13f-4aae-b836-8df8b8a50eb7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1534.343115] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf4e16-da6c-4d34-8579-678b26fa678e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.353690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809165c4-0efd-4d47-bf90-6abb85537613 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.386316] env[62405]: DEBUG nova.network.neutron [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Updated VIF entry in instance network info cache for port 19c7164f-ee95-4382-907d-6f3d78608802. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1534.386316] env[62405]: DEBUG nova.network.neutron [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Updating instance_info_cache with network_info: [{"id": "19c7164f-ee95-4382-907d-6f3d78608802", "address": "fa:16:3e:1a:f2:11", "network": {"id": "b7d514ef-30bf-4d0c-b3fb-5eb207a7ce7d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-370465885-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73ff3070616c43658c616adb113aa50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19c7164f-ee", "ovs_interfaceid": "19c7164f-ee95-4382-907d-6f3d78608802", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.388186] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a1cbd7-d57e-4b8c-9753-a41d3d374139 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.393738] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1534.393738] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20ed4f2c-49fe-426d-859d-09f2cde015a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.406456] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4c9551-a03c-4531-a3e1-89fc0caf18b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.410549] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1534.410549] env[62405]: value = "task-1946874" [ 1534.410549] env[62405]: _type = "Task" [ 1534.410549] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.428373] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1534.435966] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.441924] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e64ab66a-be13-4796-b011-3af711f24e74 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.307s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.441924] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a95220-2381-9d51-eb12-aebdcf586a1d, 'name': SearchDatastore_Task, 'duration_secs': 0.020937} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.442464] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.442958] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.442958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.445676] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.445676] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.445676] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b7610c1-8f63-4338-84e5-ca249e494fdb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.460743] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.460932] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.461692] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-708c3bc0-576b-483d-9420-2d682648a248 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.471107] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1534.471107] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52328426-1eae-024f-3ead-86bc6f20e741" [ 1534.471107] env[62405]: _type = "Task" [ 1534.471107] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.481806] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52328426-1eae-024f-3ead-86bc6f20e741, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.497616] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.571832] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "a73579d1-8647-49fe-98ce-0baffd1a558f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.571832] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.602416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.894095] env[62405]: DEBUG oslo_concurrency.lockutils [req-d338e8e7-8a91-424c-b667-b19e58901850 req-355110ac-6a41-472c-a75b-a3e016e0b3e7 service nova] Releasing lock "refresh_cache-fbedaa93-5968-4b42-b93e-201d2b44b32b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.923951] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 
tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946874, 'name': PowerOffVM_Task, 'duration_secs': 0.313345} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.923951] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1534.923951] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1534.944423] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1534.951229] env[62405]: ERROR nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [req-0b4e594f-fbb0-4ced-a889-620cb68a4100] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0b4e594f-fbb0-4ced-a889-620cb68a4100"}]} [ 1534.968321] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1534.981819] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52328426-1eae-024f-3ead-86bc6f20e741, 'name': SearchDatastore_Task, 'duration_secs': 0.017271} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.984401] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8955e2b-47e0-47d3-ac7d-fe949554b27a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.988556] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1534.988762] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1534.998303] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1534.998303] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a97eeb-7a2a-067c-2944-4285e7cd8bf2" [ 1534.998303] env[62405]: _type = "Task" [ 1534.998303] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.010977] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a97eeb-7a2a-067c-2944-4285e7cd8bf2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.012221] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1535.039100] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1535.429213] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1535.429554] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1535.429603] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1535.430392] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1535.430392] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1535.430583] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1535.430758] env[62405]: DEBUG 
nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1535.430922] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1535.431099] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1535.431263] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1535.431435] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1535.436666] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a70910ff-33a7-43fc-8e60-4e102f4fb12d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.470971] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1535.470971] env[62405]: value = "task-1946875" [ 1535.470971] env[62405]: _type = "Task" [ 1535.470971] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.479303] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.484167] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946875, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.513757] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a97eeb-7a2a-067c-2944-4285e7cd8bf2, 'name': SearchDatastore_Task, 'duration_secs': 0.017023} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.514060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.515501] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] fbedaa93-5968-4b42-b93e-201d2b44b32b/fbedaa93-5968-4b42-b93e-201d2b44b32b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1535.515809] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-130e61fb-cb1c-4a69-8cb8-a48add8d99e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.530208] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1535.530208] env[62405]: value = "task-1946876" [ 1535.530208] env[62405]: _type = "Task" [ 1535.530208] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.540759] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946876, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.657361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314c5522-93b6-4ab1-9449-0a7bfb85ee95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.667659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534d8fa-8bd9-433b-aa3a-7c264e92e0ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.706385] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c04751-1a41-4b74-877f-a70534e310a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.720395] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718d57f2-9664-4dda-bfb5-c422cc16c6dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.735827] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1535.985580] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946875, 'name': ReconfigVM_Task, 'duration_secs': 0.198113} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.985920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1536.050920] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946876, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.277852] env[62405]: DEBUG nova.scheduler.client.report [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1536.277852] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 48 to 49 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1536.278104] env[62405]: DEBUG nova.compute.provider_tree [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1536.496676] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1536.496939] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.498039] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1536.500460] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.500460] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1536.500460] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1536.500460] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1536.500460] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1536.500904] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1536.500904] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1536.500904] env[62405]: DEBUG nova.virt.hardware [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1536.509351] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1536.509708] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f039351-572e-428e-bae9-20eea264968e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.531931] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 
tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1536.531931] env[62405]: value = "task-1946877" [ 1536.531931] env[62405]: _type = "Task" [ 1536.531931] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.544128] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946877, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.548200] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704269} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.548498] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] fbedaa93-5968-4b42-b93e-201d2b44b32b/fbedaa93-5968-4b42-b93e-201d2b44b32b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.548723] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.549755] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64958ce1-7728-4af5-ac0c-3017817218f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.558300] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1536.558300] env[62405]: value = "task-1946878" [ 1536.558300] env[62405]: _type = "Task" [ 1536.558300] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.571167] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946878, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.758938] env[62405]: DEBUG nova.compute.manager [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1536.759899] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd9a268-b948-4570-be47-269a8482631e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.785048] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 7.381s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.785048] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1536.786796] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.513s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.788541] env[62405]: INFO nova.compute.claims [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1537.040910] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946877, 'name': ReconfigVM_Task, 'duration_secs': 0.199849} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.045017] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1537.045017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09aaa35-a271-4d79-a177-1faeb5baca4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.068896] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.073237] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e46c3514-132c-46a4-a61a-1446b962785e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.094284] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078689} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.095931] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1537.096416] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1537.096416] env[62405]: value = "task-1946879" [ 1537.096416] env[62405]: _type = "Task" [ 1537.096416] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.097453] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32389871-e563-408d-8058-40551bfa9d44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.130918] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] fbedaa93-5968-4b42-b93e-201d2b44b32b/fbedaa93-5968-4b42-b93e-201d2b44b32b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.135592] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5de62a61-0b54-4596-9de5-302134f99dc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.151962] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946879, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.158416] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1537.158416] env[62405]: value = "task-1946880" [ 1537.158416] env[62405]: _type = "Task" [ 1537.158416] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.168133] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.273094] env[62405]: INFO nova.compute.manager [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] instance snapshotting [ 1537.276263] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66e07e5-1806-4280-ba3b-671921c78f30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.306165] env[62405]: DEBUG nova.compute.utils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1537.311617] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1537.312023] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1537.315071] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08aaf03-77d4-42ae-87d7-b74acfb2201d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.384694] env[62405]: DEBUG nova.policy [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c9cac2fdc8246fd9bc4664cf94d1952', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18b4edb74b5d4f7a95565aebf78c444f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1537.612245] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946879, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.673878] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946880, 'name': ReconfigVM_Task, 'duration_secs': 0.294709} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.676965] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Reconfigured VM instance instance-00000014 to attach disk [datastore1] fbedaa93-5968-4b42-b93e-201d2b44b32b/fbedaa93-5968-4b42-b93e-201d2b44b32b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.676965] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a7cd1c8-c657-40fb-82c4-b034e7fc0aff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.683792] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1537.683792] env[62405]: value = "task-1946881" [ 1537.683792] env[62405]: _type = "Task" [ 1537.683792] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.693565] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946881, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.817067] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1537.829803] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1537.829803] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-466b2ef6-6619-47d1-a842-2506db62fa2b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.836898] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Successfully created port: c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1537.845506] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1537.845506] env[62405]: value = "task-1946882" [ 1537.845506] env[62405]: _type = "Task" [ 1537.845506] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.863951] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946882, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.111513] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946879, 'name': ReconfigVM_Task, 'duration_secs': 0.732161} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.115597] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 058682a1-5240-4414-9203-c612ecd12999/058682a1-5240-4414-9203-c612ecd12999.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1538.115597] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1538.174418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.174511] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.199793] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946881, 'name': Rename_Task, 'duration_secs': 0.166985} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.199793] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.200673] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-902d643f-d4fa-4486-b294-8707ae347b31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.208745] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1538.208745] env[62405]: value = "task-1946883" [ 1538.208745] env[62405]: _type = "Task" [ 1538.208745] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.217448] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.359726] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946882, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.453850] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339132bd-2763-4aeb-933f-2ad6e439421a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.461808] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d68c3f-ebfe-4fc2-a56a-a829049e1b47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.498961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7878fe7b-b9a0-49f6-9baf-771773e93949 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.507783] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06875d1-5dc2-42fa-82ad-38540a14645f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.524834] env[62405]: DEBUG nova.compute.provider_tree [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1538.624893] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e824624c-8c94-49bf-bd57-1bfbb0f4e2d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.654469] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6697b35-ce81-4231-a308-ccee0cf00240 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.678336] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 67 {{(pid=62405) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1538.720924] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946883, 'name': PowerOnVM_Task} progress is 37%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.830027] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1538.864185] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1538.864185] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1538.864305] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1538.866118] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1538.866118] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1538.866118] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1538.866118] 
env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1538.866118] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1538.866695] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1538.866695] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1538.866695] env[62405]: DEBUG nova.virt.hardware [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1538.866695] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b95eff-de42-4b65-95a6-5f27eecc9344 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.874517] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946882, 'name': CreateSnapshot_Task, 'duration_secs': 0.975091} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.874517] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1538.874517] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20b8b86-35c7-4c7c-a9f9-8fac6a9d00ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.882202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1469370-beb5-4e64-9f9e-c7eaeb0a1bc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.056255] env[62405]: ERROR nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [req-88ab98ad-548d-4dce-8b59-38e4b1537953] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-88ab98ad-548d-4dce-8b59-38e4b1537953"}]} [ 1539.071160] env[62405]: DEBUG nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1539.087956] env[62405]: DEBUG nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1539.088224] env[62405]: DEBUG nova.compute.provider_tree [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1539.101902] env[62405]: DEBUG nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1539.130854] env[62405]: DEBUG nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1539.225186] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946883, 'name': PowerOnVM_Task} progress is 72%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.281998] env[62405]: DEBUG nova.network.neutron [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Port f2f99aa3-770a-41cb-bb49-775f9f0f2708 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1539.406183] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1539.409978] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4a4c03cb-2723-46d1-88bb-cad6765dca5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.424021] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1539.424021] env[62405]: value = "task-1946884" [ 1539.424021] env[62405]: _type = "Task" [ 1539.424021] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.439105] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.724117] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946883, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.752416] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5d35e8-0732-4778-813a-73c26066dcd3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.761384] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af063a51-fe7a-4f5d-8b3d-03cde5791ad0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.801317] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e30442-6cee-4735-ad21-5b6529e333f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.808103] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Successfully updated port: c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1539.813969] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707fb70c-fdba-40bc-9a42-5a731b91aa66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.829934] env[62405]: DEBUG nova.compute.provider_tree [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1539.833374] env[62405]: DEBUG nova.compute.manager [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Received event network-vif-plugged-c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1539.836144] env[62405]: DEBUG oslo_concurrency.lockutils [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.836144] env[62405]: DEBUG oslo_concurrency.lockutils [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 
req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.836144] env[62405]: DEBUG oslo_concurrency.lockutils [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.836144] env[62405]: DEBUG nova.compute.manager [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] No waiting events found dispatching network-vif-plugged-c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1539.836144] env[62405]: WARNING nova.compute.manager [req-ff20b175-ff1a-48c8-9c89-b37b8b11ead9 req-fdff6d9b-b8e1-458e-a5bf-bf1f93ae1a21 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Received unexpected event network-vif-plugged-c3206a84-3d77-4640-bfae-253a30dfa63c for instance with vm_state building and task_state spawning. [ 1539.936309] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.233673] env[62405]: DEBUG oslo_vmware.api [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946883, 'name': PowerOnVM_Task, 'duration_secs': 1.668529} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.233934] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1540.233934] env[62405]: INFO nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 11.43 seconds to spawn the instance on the hypervisor. 
[ 1540.234121] env[62405]: DEBUG nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1540.235050] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a6c0ef-fd81-4f32-ab06-09d3b5745ac4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.315098] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.315249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.315473] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1540.326068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.326068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.326068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.373764] env[62405]: DEBUG nova.scheduler.client.report [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 50 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1540.374037] env[62405]: DEBUG nova.compute.provider_tree [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 50 to 51 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1540.375591] env[62405]: DEBUG nova.compute.provider_tree [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1540.436038] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.762072] env[62405]: INFO nova.compute.manager [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 33.33 seconds to build instance. [ 1540.869651] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1540.880369] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.093s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.880923] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1540.883833] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.440s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.885123] env[62405]: INFO nova.compute.claims [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1540.942089] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.060876] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.061672] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.106686] env[62405]: DEBUG nova.network.neutron [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updating instance_info_cache with network_info: [{"id": "c3206a84-3d77-4640-bfae-253a30dfa63c", "address": "fa:16:3e:a2:56:1e", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3206a84-3d", "ovs_interfaceid": "c3206a84-3d77-4640-bfae-253a30dfa63c", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.265220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f284a94e-cf11-4fe8-bd60-15bf2b6111e7 tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.661s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.399143] env[62405]: DEBUG nova.compute.utils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1541.401228] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.401228] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.401394] env[62405]: DEBUG nova.network.neutron [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1541.402378] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1541.402532] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1541.438399] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.455368] env[62405]: DEBUG nova.policy [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4e622c8c2b8400cb8fdb02cf6c093e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d7f990209644a10a5b12c49517e0196', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1541.613020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.613020] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Instance network_info: |[{"id": "c3206a84-3d77-4640-bfae-253a30dfa63c", "address": "fa:16:3e:a2:56:1e", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3206a84-3d", "ovs_interfaceid": "c3206a84-3d77-4640-bfae-253a30dfa63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1541.613232] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:56:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd8383707-f093-40a7-a5ba-31b0e07cac45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3206a84-3d77-4640-bfae-253a30dfa63c', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1541.623466] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc 
tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Creating folder: Project (18b4edb74b5d4f7a95565aebf78c444f). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1541.624285] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32ce2a79-b889-4ca2-8698-a9e9063b7ae8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.643441] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Created folder: Project (18b4edb74b5d4f7a95565aebf78c444f) in parent group-v401284. [ 1541.643637] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Creating folder: Instances. Parent ref: group-v401348. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1541.643906] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83a598de-f5d9-43db-b234-e4042112d70c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.658702] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Created folder: Instances in parent group-v401348. [ 1541.658702] env[62405]: DEBUG oslo.service.loopingcall [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1541.660771] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1541.661389] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3de3af0b-21d6-4ee8-b04c-8bd7ebdd58e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.689349] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1541.689349] env[62405]: value = "task-1946887" [ 1541.689349] env[62405]: _type = "Task" [ 1541.689349] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.699038] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946887, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.769233] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1541.807678] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Successfully created port: 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.907994] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1541.940456] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946884, 'name': CloneVM_Task, 'duration_secs': 2.386334} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.940456] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Created linked-clone VM from snapshot [ 1541.940456] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fa88cb-35d4-4dc7-b3fb-32fd4dea3e5a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.960905] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Uploading image 47c64ab2-fad8-4839-a33b-53a897e09d15 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1541.994317] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1541.994317] env[62405]: value = "vm-401347" [ 1541.994317] env[62405]: _type = "VirtualMachine" [ 1541.994317] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1541.994317] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cf5be50f-da40-43aa-a119-510f2cbfb4ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.011161] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease: (returnval){ [ 1542.011161] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214d133-be4d-3f91-e4bc-247498b0598d" [ 1542.011161] env[62405]: _type = "HttpNfcLease" [ 1542.011161] env[62405]: } obtained for exporting VM: (result){ [ 1542.011161] env[62405]: value = "vm-401347" [ 1542.011161] env[62405]: _type = "VirtualMachine" [ 1542.011161] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1542.011161] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the lease: (returnval){ [ 1542.011161] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214d133-be4d-3f91-e4bc-247498b0598d" [ 1542.011161] env[62405]: _type = "HttpNfcLease" [ 1542.011161] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1542.024020] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.024020] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214d133-be4d-3f91-e4bc-247498b0598d" [ 1542.024020] env[62405]: _type = "HttpNfcLease" [ 1542.024020] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1542.082949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "7db1b086-942e-4890-8750-0d717e522786" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.082949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.082949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "7db1b086-942e-4890-8750-0d717e522786-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.082949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.083383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.085538] env[62405]: INFO nova.compute.manager [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Terminating instance [ 1542.205296] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946887, 'name': CreateVM_Task, 'duration_secs': 0.430128} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.205533] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1542.206346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.206631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.207101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1542.207700] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-990f0b6e-47ce-4f8c-b6aa-c38f7327ded6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.219426] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1542.219426] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521bc675-4207-de67-d38e-5b451a5a64fd" [ 1542.219426] env[62405]: _type = "Task" [ 1542.219426] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.233828] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521bc675-4207-de67-d38e-5b451a5a64fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.289201] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.429020] env[62405]: DEBUG nova.compute.manager [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Received event network-changed-c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1542.429020] env[62405]: DEBUG nova.compute.manager [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Refreshing instance network info cache due to event network-changed-c3206a84-3d77-4640-bfae-253a30dfa63c. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1542.429020] env[62405]: DEBUG oslo_concurrency.lockutils [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] Acquiring lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.429020] env[62405]: DEBUG oslo_concurrency.lockutils [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] Acquired lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.429020] env[62405]: DEBUG nova.network.neutron [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Refreshing network info cache for port c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1542.455227] env[62405]: DEBUG nova.compute.manager [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1542.455227] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055f72dc-7d2a-4959-9783-c5043abd1f44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.519991] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1542.519991] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214d133-be4d-3f91-e4bc-247498b0598d" [ 1542.519991] env[62405]: _type = "HttpNfcLease" [ 1542.519991] env[62405]: } is ready. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1542.521166] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1542.521166] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214d133-be4d-3f91-e4bc-247498b0598d" [ 1542.521166] env[62405]: _type = "HttpNfcLease" [ 1542.521166] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1542.521896] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e3db6e-9164-46b0-81b6-99117a2d86e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.526079] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdde438b-03a8-4112-8b07-9aaf4fb7a657 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.539476] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b94e233-3416-42cd-b741-15f4496ac71e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.543568] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1542.543568] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1542.605619] env[62405]: DEBUG nova.compute.manager [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1542.605950] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1542.632596] env[62405]: DEBUG nova.network.neutron [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.634635] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2aef7e-133c-47a9-b96c-ecaa75cbcc34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.639625] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1141bf-489b-40c3-b257-720839d7f670 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.650969] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9127c3-72f0-47a1-967e-4c0ca7c907a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.656868] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1542.657475] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb3c8987-53d2-4943-9367-d90e07c8c1fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.669559] env[62405]: DEBUG nova.compute.provider_tree [None req-6db8342a-f858-442a-9f6f-225e0b138a64 
tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1542.672349] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1542.672349] env[62405]: value = "task-1946889" [ 1542.672349] env[62405]: _type = "Task" [ 1542.672349] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.681760] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.712252] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d57c34c7-90f9-4eb6-9e86-3b4ccc548af0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.732639] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521bc675-4207-de67-d38e-5b451a5a64fd, 'name': SearchDatastore_Task, 'duration_secs': 0.030067} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.734266] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.734649] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1542.735143] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.735408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.735554] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.737023] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83d49d97-2f82-4af0-b0a8-00600994cdfd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.747318] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.747318] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1542.747660] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c79d188e-c5b4-4c7d-af4b-636a4abdf409 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.757936] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1542.757936] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c9a049-e369-36b2-282b-234148975449" [ 1542.757936] env[62405]: _type = "Task" [ 1542.757936] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.766705] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c9a049-e369-36b2-282b-234148975449, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.922784] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1542.951055] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1542.951348] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1542.951508] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1542.951690] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e 
tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1542.951835] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1542.951980] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1542.952214] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1542.952450] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1542.952531] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1542.952691] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1542.952919] env[62405]: DEBUG nova.virt.hardware [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1542.954599] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c848633a-8c02-4bd9-a2b9-fe6df483442c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.969682] env[62405]: INFO nova.compute.manager [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] instance snapshotting [ 1542.972409] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f7dd74-5913-40e2-a037-49340b9f9ab9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.977407] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ee76d3-6940-4230-837b-c5ca4d4eca01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.008724] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16013d8-19c4-49b6-9a2c-a2561a67c9fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.139088] env[62405]: DEBUG oslo_concurrency.lockutils [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.189033] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946889, 'name': PowerOffVM_Task, 'duration_secs': 0.195908} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.189312] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1543.189474] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1543.189719] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-439481b2-444d-479d-a237-864e0d4deff1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.197232] env[62405]: ERROR nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [req-3b1383f4-6ae4-4a26-bd66-56274e38ad51] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3b1383f4-6ae4-4a26-bd66-56274e38ad51"}]} [ 1543.215862] env[62405]: DEBUG nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1543.236910] env[62405]: DEBUG nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1543.237160] env[62405]: DEBUG nova.compute.provider_tree [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1543.250147] env[62405]: DEBUG nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1543.271417] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c9a049-e369-36b2-282b-234148975449, 'name': SearchDatastore_Task, 'duration_secs': 0.01204} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.272517] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081a89c5-4c73-4fe1-a074-993d888de021 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.279659] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1543.279659] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4d37-1473-523a-3980-259f5bbc82ab" [ 1543.279659] env[62405]: _type = "Task" [ 1543.279659] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.280572] env[62405]: DEBUG nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1543.301277] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4d37-1473-523a-3980-259f5bbc82ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.377429] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1543.377656] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1543.377836] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Deleting the datastore file [datastore1] 7db1b086-942e-4890-8750-0d717e522786 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1543.381029] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91ce9161-2aff-4fff-afc0-ad09d5da0f3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.389918] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for the task: (returnval){ [ 1543.389918] env[62405]: value = "task-1946891" [ 1543.389918] env[62405]: _type = "Task" [ 1543.389918] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.403120] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.523237] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1543.523237] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-acd64eae-8e4b-4f4f-b13c-99a4f6202424 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.538502] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1543.538502] env[62405]: value = "task-1946892" [ 1543.538502] env[62405]: _type = "Task" [ 1543.538502] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.566221] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946892, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.642219] env[62405]: DEBUG nova.network.neutron [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updated VIF entry in instance network info cache for port c3206a84-3d77-4640-bfae-253a30dfa63c. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.642715] env[62405]: DEBUG nova.network.neutron [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updating instance_info_cache with network_info: [{"id": "c3206a84-3d77-4640-bfae-253a30dfa63c", "address": "fa:16:3e:a2:56:1e", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3206a84-3d", "ovs_interfaceid": "c3206a84-3d77-4640-bfae-253a30dfa63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.670688] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3889e9d3-5aa0-4249-8e4d-42fbefab31a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.727404] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665bedfd-98ea-4fa3-834d-2a45465eed03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.742829] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1543.756235] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: 
f8c6f99f-499f-4886-aae9-5f08969175f6] Successfully updated port: 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.796842] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4d37-1473-523a-3980-259f5bbc82ab, 'name': SearchDatastore_Task, 'duration_secs': 0.017949} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.797137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.797445] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 777ddb84-25b9-4da6-be6b-a2289dbf510a/777ddb84-25b9-4da6-be6b-a2289dbf510a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1543.797672] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-23ec825d-5a1b-4ad5-aead-5f89f6cba1e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.815198] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1543.815198] env[62405]: value = "task-1946893" [ 1543.815198] env[62405]: _type = "Task" [ 1543.815198] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.844464] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.907154] env[62405]: DEBUG oslo_vmware.api [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Task: {'id': task-1946891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305108} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.907154] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1543.907154] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1543.907154] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1543.907436] env[62405]: INFO nova.compute.manager [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] [instance: 7db1b086-942e-4890-8750-0d717e522786] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1543.907478] env[62405]: DEBUG oslo.service.loopingcall [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1543.907658] env[62405]: DEBUG nova.compute.manager [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1543.907738] env[62405]: DEBUG nova.network.neutron [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1543.971768] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374f9d35-886c-4030-837d-c94dc8d5eb54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.988820] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db15c005-6ec2-48a3-958e-86a7a8177a54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.032471] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98affa8f-2b86-409c-aa79-f73f28289ce6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.045391] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f08ad2-1a57-4282-84a1-5631c0b47833 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.064977] env[62405]: DEBUG nova.compute.provider_tree [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1544.070985] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946892, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.146466] env[62405]: DEBUG oslo_concurrency.lockutils [req-38a2d39d-7c79-4364-87f4-b16e0013826b req-946d049b-7b39-4590-a4ca-476751cfc10f service nova] Releasing lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.262207] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.262207] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24982b55-4fa6-43f4-9510-d9a5272883d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.262207] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.263715] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquired lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.263945] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.275801] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1544.275801] env[62405]: value = "task-1946894" [ 1544.275801] env[62405]: _type = "Task" [ 1544.275801] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.294776] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.335546] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946893, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.556018] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946892, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.640598] env[62405]: DEBUG nova.scheduler.client.report [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1544.641479] env[62405]: DEBUG nova.compute.provider_tree [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 52 to 53 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1544.642637] env[62405]: DEBUG nova.compute.provider_tree [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1544.795989] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946894, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.809810] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1544.830810] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743915} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.834167] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 777ddb84-25b9-4da6-be6b-a2289dbf510a/777ddb84-25b9-4da6-be6b-a2289dbf510a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1544.834788] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1544.835070] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4eacf0f-c072-430a-a85f-f728a683bd1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.848267] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1544.848267] env[62405]: value = "task-1946895" [ 1544.848267] env[62405]: _type = "Task" [ 1544.848267] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.858038] env[62405]: DEBUG nova.compute.manager [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Received event network-vif-plugged-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1544.858346] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Acquiring lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.858747] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.858949] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.859151] env[62405]: DEBUG nova.compute.manager [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] No waiting 
events found dispatching network-vif-plugged-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1544.859326] env[62405]: WARNING nova.compute.manager [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Received unexpected event network-vif-plugged-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 for instance with vm_state building and task_state spawning. [ 1544.859628] env[62405]: DEBUG nova.compute.manager [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Received event network-changed-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1544.860029] env[62405]: DEBUG nova.compute.manager [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Refreshing instance network info cache due to event network-changed-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1544.860417] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Acquiring lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.869316] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.990359] env[62405]: DEBUG nova.network.neutron [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updating instance_info_cache with network_info: [{"id": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "address": "fa:16:3e:39:9b:cb", "network": {"id": "1df319a9-509c-4707-ad01-fc460b61420b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2109870983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d7f990209644a10a5b12c49517e0196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b9bb4b7-58", "ovs_interfaceid": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.053496] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946892, 'name': CreateSnapshot_Task, 'duration_secs': 1.222425} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.053496] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1545.054459] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b18a91-fbb4-451b-99a7-c7e47a99a52d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.082858] env[62405]: DEBUG nova.network.neutron [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.153287] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.267s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.153287] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1545.155367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.803s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.155775] env[62405]: DEBUG nova.objects.instance [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lazy-loading 'resources' on Instance uuid 801e7086-5742-4a04-962c-7546284aa12d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1545.290268] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946894, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.357296] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076362} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.357296] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1545.359160] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba9c5d0-24d6-405a-8d0c-7b0792481360 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.384671] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 777ddb84-25b9-4da6-be6b-a2289dbf510a/777ddb84-25b9-4da6-be6b-a2289dbf510a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1545.385374] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5196d104-3a39-4249-96cc-35647c0a2e33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.401184] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.407603] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1545.407603] env[62405]: value = "task-1946896" [ 1545.407603] env[62405]: _type = "Task" [ 1545.407603] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.416526] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946896, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.494797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Releasing lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.495085] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Instance network_info: |[{"id": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "address": "fa:16:3e:39:9b:cb", "network": {"id": "1df319a9-509c-4707-ad01-fc460b61420b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2109870983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d7f990209644a10a5b12c49517e0196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b9bb4b7-58", "ovs_interfaceid": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1545.495385] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Acquired lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.495566] env[62405]: DEBUG nova.network.neutron [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Refreshing network info cache for port 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1545.496745] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:9b:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b9bb4b7-58d5-4182-ad5b-0a10e3a34546', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.504355] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 
tempest-ServersTestManualDisk-2027945359-project-member] Creating folder: Project (4d7f990209644a10a5b12c49517e0196). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1545.504467] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f3f2e33-3d5f-4e31-b557-0f93504ec6b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.518287] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Created folder: Project (4d7f990209644a10a5b12c49517e0196) in parent group-v401284. [ 1545.518766] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Creating folder: Instances. Parent ref: group-v401352. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1545.518766] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a5afa47-9343-40ec-841d-056abd5aeefe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.528905] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Created folder: Instances in parent group-v401352. [ 1545.529168] env[62405]: DEBUG oslo.service.loopingcall [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.529357] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.529566] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c663428d-db10-46a4-b287-f9beff33ee85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.552644] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.552644] env[62405]: value = "task-1946899" [ 1545.552644] env[62405]: _type = "Task" [ 1545.552644] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.561059] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946899, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.574951] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1545.575313] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6a5c65a2-0f3b-4757-81c3-53ea0f1cbad8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.585794] env[62405]: INFO nova.compute.manager [-] [instance: 7db1b086-942e-4890-8750-0d717e522786] Took 1.68 seconds to deallocate network for instance. [ 1545.587216] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1545.587216] env[62405]: value = "task-1946900" [ 1545.587216] env[62405]: _type = "Task" [ 1545.587216] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.601873] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.664776] env[62405]: DEBUG nova.compute.utils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.670023] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1545.670023] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.760755] env[62405]: DEBUG nova.policy [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ae271e171d54bf4b1af909e68d3e449', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '915d6ea5e5184efab9fbeda21e3b8a64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1545.793471] env[62405]: DEBUG oslo_vmware.api [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1946894, 'name': PowerOnVM_Task, 'duration_secs': 1.184146} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.793471] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.793471] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-14ed0941-12b7-4219-9ade-8830ebc34cc2 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance '058682a1-5240-4414-9203-c612ecd12999' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1545.918177] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946896, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.073029] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946899, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.099350] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.099902] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.171108] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1546.247848] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5712817c-f22e-4b58-8ae0-dbb240a74106 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.259975] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743d54e0-e638-40ee-a076-33ab9ad130f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.296791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52874e9-9c95-42ba-8e18-98bd5fce2cf6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.310170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4a951e-1a06-4de7-94d5-b1e85cc0c0a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.328500] env[62405]: DEBUG nova.compute.provider_tree [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1546.331750] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Successfully created port: e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
1546.391489] env[62405]: DEBUG nova.network.neutron [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updated VIF entry in instance network info cache for port 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.391888] env[62405]: DEBUG nova.network.neutron [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updating instance_info_cache with network_info: [{"id": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "address": "fa:16:3e:39:9b:cb", "network": {"id": "1df319a9-509c-4707-ad01-fc460b61420b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2109870983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d7f990209644a10a5b12c49517e0196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b9bb4b7-58", "ovs_interfaceid": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.419053] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946896, 'name': ReconfigVM_Task, 'duration_secs': 0.543201} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.419329] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 777ddb84-25b9-4da6-be6b-a2289dbf510a/777ddb84-25b9-4da6-be6b-a2289dbf510a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1546.419981] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f92f541b-827a-4b3f-bdb6-b2d4a57d2c3c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.429026] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1546.429026] env[62405]: value = "task-1946901" [ 1546.429026] env[62405]: _type = "Task" [ 1546.429026] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.441029] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946901, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.568526] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946899, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.599565] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.877767] env[62405]: DEBUG nova.scheduler.client.report [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 53 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1546.880992] env[62405]: DEBUG nova.compute.provider_tree [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 53 to 54 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1546.880992] env[62405]: DEBUG nova.compute.provider_tree [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1546.900838] env[62405]: DEBUG oslo_concurrency.lockutils [req-e0c34c54-7d01-4eb8-ac77-e4eeb25dc8d3 req-357083a6-5ee1-432f-9953-0e02de86958b service nova] Releasing lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.942829] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 
tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946901, 'name': Rename_Task, 'duration_secs': 0.248214} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.945874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.946579] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7f901ba-12e4-4279-a94f-4615631ab57f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.959102] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1546.959102] env[62405]: value = "task-1946902" [ 1546.959102] env[62405]: _type = "Task" [ 1546.959102] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.971024] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.019330] env[62405]: DEBUG nova.compute.manager [req-f3f63431-73d2-4965-98e4-03a53f74f59b req-0becf629-2362-44ab-98b7-da5fc6cecb5f service nova] [instance: 7db1b086-942e-4890-8750-0d717e522786] Received event network-vif-deleted-acb33455-b824-40fd-99bd-4628778412a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1547.066120] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946899, 'name': CreateVM_Task, 'duration_secs': 1.415626} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.066391] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1547.067217] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.067455] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.067881] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1547.069368] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c354df8-928b-4111-b204-ceac91972d85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.074933] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1547.074933] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52716ae2-c543-2db6-983e-785f352c75e2" [ 1547.074933] env[62405]: _type = "Task" [ 1547.074933] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.086901] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52716ae2-c543-2db6-983e-785f352c75e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.099508] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.184048] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1547.215773] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:21:23Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1517717022',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-2635190',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1547.216063] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.216234] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1547.216422] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.216572] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1547.216720] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1547.216929] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1547.217105] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 
tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1547.217399] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1547.217463] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1547.217608] env[62405]: DEBUG nova.virt.hardware [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1547.218597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ff46ef-9096-40cc-b64a-41edeedb05e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.227605] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf4403f-4a41-4c70-9ceb-28b8170e849e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.388611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.233s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.391227] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.019s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.393374] env[62405]: INFO nova.compute.claims [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1547.432789] env[62405]: INFO nova.scheduler.client.report [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Deleted allocations for instance 801e7086-5742-4a04-962c-7546284aa12d [ 1547.470029] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946902, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.589163] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52716ae2-c543-2db6-983e-785f352c75e2, 'name': SearchDatastore_Task, 'duration_secs': 0.02147} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.590707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.590707] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.591554] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.591785] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.591991] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1547.595727] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-950fef14-ef6f-46dc-a5fd-5f65471e18df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.604197] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.943995] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ca943265-0b77-4b99-b254-a845228e25bf tempest-ServerDiagnosticsNegativeTest-1233087375 tempest-ServerDiagnosticsNegativeTest-1233087375-project-member] Lock "801e7086-5742-4a04-962c-7546284aa12d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.727s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.972916] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946902, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.102964] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.246164] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Successfully updated port: e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1548.332086] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.332398] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.332602] env[62405]: DEBUG nova.compute.manager [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Going to confirm migration 1 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1548.401057] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1548.473553] env[62405]: DEBUG oslo_vmware.api [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1946902, 'name': PowerOnVM_Task, 'duration_secs': 1.511976} completed 
successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.473931] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1548.474247] env[62405]: INFO nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Took 9.65 seconds to spawn the instance on the hypervisor. [ 1548.474440] env[62405]: DEBUG nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1548.475230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d4e6fb-d4c3-4aa3-8967-8d91e87c6308 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.603855] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.750738] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.750899] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.751068] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.944925] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.945128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired 
lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.945306] env[62405]: DEBUG nova.network.neutron [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.945490] env[62405]: DEBUG nova.objects.instance [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lazy-loading 'info_cache' on Instance uuid 058682a1-5240-4414-9203-c612ecd12999 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1549.006519] env[62405]: INFO nova.compute.manager [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Took 37.18 seconds to build instance. [ 1549.009238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd7acc2-b2ba-4d7d-a129-a472c9c1f12b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.018634] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58bd1c1-4190-4920-a4ff-1422a0c78769 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.056428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a7f260-45e3-48bd-b36b-016bb4a2875f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.065657] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149fd021-9814-4a86-bdab-2a24bd2b5ba5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.083021] env[62405]: DEBUG nova.compute.provider_tree [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.105955] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.294199] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1549.300027] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.300027] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.300248] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb88c38f-3d7a-4edd-886b-eea79e545b69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.309584] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1549.309584] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bdeb5d-2cef-f534-75d1-7f661401ca9d" [ 1549.309584] env[62405]: _type = "Task" [ 1549.309584] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.321488] env[62405]: DEBUG nova.compute.manager [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Received event network-vif-plugged-e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1549.321580] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Acquiring lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.321827] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.321960] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.322095] env[62405]: DEBUG nova.compute.manager [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] No waiting events found dispatching network-vif-plugged-e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1549.322486] env[62405]: WARNING nova.compute.manager [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Received unexpected event network-vif-plugged-e3b36820-3fc9-4b42-820d-9018b302c322 for instance with vm_state building and task_state spawning. [ 1549.322486] env[62405]: DEBUG nova.compute.manager [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Received event network-changed-e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1549.322647] env[62405]: DEBUG nova.compute.manager [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Refreshing instance network info cache due to event network-changed-e3b36820-3fc9-4b42-820d-9018b302c322. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1549.323899] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Acquiring lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.331258] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bdeb5d-2cef-f534-75d1-7f661401ca9d, 'name': SearchDatastore_Task, 'duration_secs': 0.016084} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.335381] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051424c9-8a5e-4b9e-bdf4-210de6dc023f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.344955] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1549.344955] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52719ae6-97b9-2ce6-5f68-e2dc07391835" [ 1549.344955] env[62405]: _type = "Task" [ 1549.344955] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.354812] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52719ae6-97b9-2ce6-5f68-e2dc07391835, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.401262] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1549.401262] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1549.518399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20d2366d-4053-4b1b-b5d1-4422cf6460dc tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.377s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.550978] env[62405]: DEBUG nova.network.neutron [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updating instance_info_cache with network_info: [{"id": "e3b36820-3fc9-4b42-820d-9018b302c322", "address": "fa:16:3e:a3:2a:5c", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3b36820-3f", "ovs_interfaceid": "e3b36820-3fc9-4b42-820d-9018b302c322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.587155] env[62405]: DEBUG nova.scheduler.client.report [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1549.610186] env[62405]: 
DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946900, 'name': CloneVM_Task, 'duration_secs': 3.733652} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.610186] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Created linked-clone VM from snapshot [ 1549.611407] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e5955-7202-4ca4-9611-2cd734285e4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.623509] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Uploading image cc576845-9d46-42b4-b1ee-897e638ba48c {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1549.657974] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1549.657974] env[62405]: value = "vm-401355" [ 1549.657974] env[62405]: _type = "VirtualMachine" [ 1549.657974] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1549.659055] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0de1d93b-6c05-4893-b515-60a589d7316d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.672098] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lease: (returnval){ [ 1549.672098] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215b129-4c75-0dcf-75e4-9a17241465af" [ 1549.672098] env[62405]: _type = "HttpNfcLease" [ 1549.672098] env[62405]: } obtained for exporting VM: (result){ [ 1549.672098] env[62405]: value = "vm-401355" [ 1549.672098] env[62405]: _type = "VirtualMachine" [ 1549.672098] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1549.672098] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the lease: (returnval){ [ 1549.672098] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215b129-4c75-0dcf-75e4-9a17241465af" [ 1549.672098] env[62405]: _type = "HttpNfcLease" [ 1549.672098] env[62405]: } to be ready. 
{{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1549.682201] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1549.682201] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215b129-4c75-0dcf-75e4-9a17241465af" [ 1549.682201] env[62405]: _type = "HttpNfcLease" [ 1549.682201] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1549.858245] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52719ae6-97b9-2ce6-5f68-e2dc07391835, 'name': SearchDatastore_Task, 'duration_secs': 0.016885} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.858557] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.858799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f8c6f99f-499f-4886-aae9-5f08969175f6/f8c6f99f-499f-4886-aae9-5f08969175f6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1549.859081] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-857e2c1d-0148-4c13-8310-7eb90f972f22 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.868018] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1549.868018] env[62405]: value = "task-1946904" [ 1549.868018] env[62405]: _type = "Task" [ 1549.868018] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.879488] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.023355] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1550.053697] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.054036] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Instance network_info: |[{"id": "e3b36820-3fc9-4b42-820d-9018b302c322", "address": "fa:16:3e:a3:2a:5c", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3b36820-3f", "ovs_interfaceid": "e3b36820-3fc9-4b42-820d-9018b302c322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1550.055021] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Acquired lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.055246] env[62405]: DEBUG nova.network.neutron [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Refreshing network info cache for port e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.056570] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:2a:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3b36820-3fc9-4b42-820d-9018b302c322', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1550.068861] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 
tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Creating folder: Project (915d6ea5e5184efab9fbeda21e3b8a64). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1550.069507] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97c465cb-f5ac-4e6b-8bf9-4d49d9890611 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.084366] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Created folder: Project (915d6ea5e5184efab9fbeda21e3b8a64) in parent group-v401284. [ 1550.084534] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Creating folder: Instances. Parent ref: group-v401356. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1550.087123] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f97b9ee2-8ec2-4539-b1b1-d981f89e49ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.099508] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Created folder: Instances in parent group-v401356. [ 1550.099758] env[62405]: DEBUG oslo.service.loopingcall [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.099946] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1550.100167] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6d29ed5-f05d-4683-a482-80b668e6a3fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.126971] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.127579] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1550.130731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.995s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.132287] env[62405]: INFO nova.compute.claims [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.141248] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1550.141248] env[62405]: value = "task-1946907" [ 1550.141248] env[62405]: _type = "Task" [ 1550.141248] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.150419] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946907, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.189705] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1550.189705] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215b129-4c75-0dcf-75e4-9a17241465af" [ 1550.189705] env[62405]: _type = "HttpNfcLease" [ 1550.189705] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1550.191653] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1550.191653] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215b129-4c75-0dcf-75e4-9a17241465af" [ 1550.191653] env[62405]: _type = "HttpNfcLease" [ 1550.191653] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1550.191653] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73521ae0-7767-4214-9622-6047c3099a08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.205756] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1550.206591] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk for reading. 
{{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1550.381996] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.403432] env[62405]: DEBUG nova.network.neutron [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [{"id": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "address": "fa:16:3e:ed:e3:4c", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f99aa3-77", "ovs_interfaceid": "f2f99aa3-770a-41cb-bb49-775f9f0f2708", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.544796] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.644400] env[62405]: DEBUG nova.compute.utils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1550.644400] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1550.644669] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1550.660619] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946907, 'name': CreateVM_Task, 'duration_secs': 0.494581} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.660880] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1550.661545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.662155] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.662155] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1550.663172] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06544efe-0821-41a4-af92-193409d62c04 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.670920] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1550.670920] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4cf1d-f5e5-dbff-a84b-7b6139e4b840" [ 1550.670920] env[62405]: _type = "Task" [ 1550.670920] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.673961] env[62405]: DEBUG nova.compute.manager [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Received event network-changed-c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1550.674165] env[62405]: DEBUG nova.compute.manager [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Refreshing instance network info cache due to event network-changed-c3206a84-3d77-4640-bfae-253a30dfa63c. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1550.674415] env[62405]: DEBUG oslo_concurrency.lockutils [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] Acquiring lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.674557] env[62405]: DEBUG oslo_concurrency.lockutils [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] Acquired lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.675540] env[62405]: DEBUG nova.network.neutron [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Refreshing network info cache for port c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1550.692785] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4cf1d-f5e5-dbff-a84b-7b6139e4b840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.727746] env[62405]: DEBUG nova.policy [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6472af0b6f6240f297f7f137cde41929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb1da47e8b1a400fab7817d9e6b282ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1550.879526] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.905605] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-058682a1-5240-4414-9203-c612ecd12999" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.905838] env[62405]: DEBUG nova.objects.instance [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lazy-loading 'migration_context' on Instance uuid 058682a1-5240-4414-9203-c612ecd12999 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1550.969935] env[62405]: DEBUG nova.network.neutron [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updated VIF entry in instance network info cache for port e3b36820-3fc9-4b42-820d-9018b302c322. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1550.970410] env[62405]: DEBUG nova.network.neutron [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updating instance_info_cache with network_info: [{"id": "e3b36820-3fc9-4b42-820d-9018b302c322", "address": "fa:16:3e:a3:2a:5c", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3b36820-3f", "ovs_interfaceid": "e3b36820-3fc9-4b42-820d-9018b302c322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.059816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.060141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.150128] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1551.189680] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4cf1d-f5e5-dbff-a84b-7b6139e4b840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.380420] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.411905] env[62405]: DEBUG nova.objects.base [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Object Instance<058682a1-5240-4414-9203-c612ecd12999> lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1551.413535] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28028281-53eb-4fcd-8577-853ef03c7688 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.440250] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc20586e-d9f0-4f64-9b75-1086c249d02f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.448673] env[62405]: DEBUG oslo_vmware.api [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1551.448673] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250f2ae-71b3-c67e-1912-21347af885e4" [ 1551.448673] env[62405]: _type = "Task" [ 1551.448673] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.456786] env[62405]: DEBUG oslo_vmware.api [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250f2ae-71b3-c67e-1912-21347af885e4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.466266] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Successfully created port: 677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1551.475798] env[62405]: DEBUG oslo_concurrency.lockutils [req-69000fcc-388e-43a3-8275-f442c6cdc322 req-34aa7b0d-7241-440a-a6a9-b230d69d6b6c service nova] Releasing lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.482260] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-02eb0a40-d617-4a6c-a19c-21780f36b307 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.619470] env[62405]: DEBUG nova.network.neutron [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updated VIF entry in instance network info cache for port c3206a84-3d77-4640-bfae-253a30dfa63c. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1551.619839] env[62405]: DEBUG nova.network.neutron [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updating instance_info_cache with network_info: [{"id": "c3206a84-3d77-4640-bfae-253a30dfa63c", "address": "fa:16:3e:a2:56:1e", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3206a84-3d", "ovs_interfaceid": "c3206a84-3d77-4640-bfae-253a30dfa63c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.685762] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f4cf1d-f5e5-dbff-a84b-7b6139e4b840, 'name': SearchDatastore_Task, 'duration_secs': 0.783828} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.688841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.688841] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1551.688841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.688841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.689099] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1551.689631] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b246cdba-82b0-44b9-83cf-a020cb8632c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.822538] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525b2be4-9487-4dac-a9f3-8f9c37339301 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.831094] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce632b9-8bb4-4e54-86c1-c21cd1c18b4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.862711] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd08dbf-a245-4bb0-bc55-5215b97e5282 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.871047] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a133b3-799b-46e6-9f34-48fd93466d18 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.883333] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.891014] env[62405]: DEBUG nova.compute.provider_tree [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1551.960090] env[62405]: DEBUG oslo_vmware.api [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250f2ae-71b3-c67e-1912-21347af885e4, 'name': SearchDatastore_Task, 'duration_secs': 0.020241} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.960461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.125180] env[62405]: DEBUG oslo_concurrency.lockutils [req-c1e8f193-4161-48bb-8c61-dde56753a792 req-c7447865-9244-4255-a746-161e0adbc4f5 service nova] Releasing lock "refresh_cache-777ddb84-25b9-4da6-be6b-a2289dbf510a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.172028] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1552.205577] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1552.205834] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1552.205997] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1552.206207] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1552.206356] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1552.206503] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1552.206710] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1552.206867] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1552.208027] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 
tempest-ImagesTestJSON-1176465240-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1552.208027] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1552.208027] env[62405]: DEBUG nova.virt.hardware [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1552.208967] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9828ea-4dd7-4ca8-a25f-d88bc9b82712 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.219301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4355ef43-28c4-457d-8680-710b1baa2789 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.387616] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.396780] env[62405]: DEBUG nova.scheduler.client.report [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1552.401065] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.401163] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1552.689672] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.690855] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 
tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.690855] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5048b9b3-33ea-4bb2-ac4d-1473d832dc77 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.697838] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1552.697838] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c080ae-38d7-0c2d-811d-6c1849a9e4e6" [ 1552.697838] env[62405]: _type = "Task" [ 1552.697838] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.714532] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c080ae-38d7-0c2d-811d-6c1849a9e4e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.895307] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.904297] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.905795] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1552.910337] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.594s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.912099] env[62405]: INFO nova.compute.claims [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1553.215777] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c080ae-38d7-0c2d-811d-6c1849a9e4e6, 'name': SearchDatastore_Task, 'duration_secs': 0.026153} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.216797] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bec46417-b051-46fd-a170-dc8684c32fbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.225144] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1553.225144] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528397f1-3b43-1814-5a45-70ab165c93d5" [ 1553.225144] env[62405]: _type = "Task" [ 1553.225144] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.239778] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528397f1-3b43-1814-5a45-70ab165c93d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.391538] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.412298] env[62405]: DEBUG nova.compute.utils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1553.414697] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1553.414697] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1553.481918] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.482331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.482546] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6199de01-baca-4461-9572-111eda11adac] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1553.599478] env[62405]: DEBUG nova.policy [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd25c41c232349ef87887a4285b71767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d1aee7c44f44abc86ed5c15b027e989', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1553.740033] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528397f1-3b43-1814-5a45-70ab165c93d5, 'name': SearchDatastore_Task, 'duration_secs': 0.064598} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.740497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.741728] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b21dc1e7-dacd-4154-9bc3-0fa3774695a8/b21dc1e7-dacd-4154-9bc3-0fa3774695a8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1553.741728] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1998eeef-2971-448b-a991-c397949618e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.752665] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1553.752665] env[62405]: value = "task-1946908" [ 1553.752665] env[62405]: _type = "Task" [ 1553.752665] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.768792] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.891095] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946904, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.550392} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.891439] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f8c6f99f-499f-4886-aae9-5f08969175f6/f8c6f99f-499f-4886-aae9-5f08969175f6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1553.891733] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1553.892103] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5be60f16-733b-4494-b641-ba33488b5c09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.902060] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1553.902060] env[62405]: value = "task-1946909" [ 1553.902060] env[62405]: _type = "Task" [ 1553.902060] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.913611] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.920865] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1554.021412] env[62405]: DEBUG nova.compute.manager [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Received event network-vif-plugged-677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1554.021703] env[62405]: DEBUG oslo_concurrency.lockutils [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] Acquiring lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.021906] env[62405]: DEBUG oslo_concurrency.lockutils [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.022080] env[62405]: DEBUG oslo_concurrency.lockutils [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.022327] env[62405]: DEBUG nova.compute.manager [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] No waiting events found dispatching network-vif-plugged-677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1554.022430] env[62405]: WARNING nova.compute.manager [req-7fffba8c-1d9b-481c-8d63-a4a110cf706f req-a01e0b59-d786-44ae-b637-c307c1d47ddd service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Received unexpected event network-vif-plugged-677da2fd-a16d-4c43-b074-8aee4d0abe46 for instance with vm_state building and task_state spawning. [ 1554.024838] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1554.123719] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Successfully updated port: 677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.135202] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Successfully created port: 62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1554.268985] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946908, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.429193] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147052} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.429788] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1554.430747] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbfd16d-8d4a-4deb-b6a2-47ff80014ab5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.459998] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] f8c6f99f-499f-4886-aae9-5f08969175f6/f8c6f99f-499f-4886-aae9-5f08969175f6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.463667] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2e181a9-e915-4b7a-8092-5d9888e55c13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.487116] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1554.487116] env[62405]: value = "task-1946910" [ 1554.487116] env[62405]: _type = "Task" [ 1554.487116] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.498372] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.567581] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea688b4-b63c-41b0-9ecc-edff3cbcd650 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.581720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a450e322-7138-4154-939c-4efdedb601a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.633686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.633686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.633686] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1554.633686] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6199de01-baca-4461-9572-111eda11adac] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.634997] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b5b28c-088a-4db0-b447-1285dbf70863 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.646682] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db734f6e-e299-4034-ae09-fb8c7ac4aaaf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.664480] env[62405]: DEBUG nova.compute.provider_tree [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.765182] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] 
Task: {'id': task-1946908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773582} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.765517] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b21dc1e7-dacd-4154-9bc3-0fa3774695a8/b21dc1e7-dacd-4154-9bc3-0fa3774695a8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1554.765795] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1554.766179] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dcd3694b-67cd-455a-849d-2d42444e7839 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.775770] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1554.775770] env[62405]: value = "task-1946911" [ 1554.775770] env[62405]: _type = "Task" [ 1554.775770] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.784818] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.931220] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1554.999896] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946910, 'name': ReconfigVM_Task, 'duration_secs': 0.326298} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.000469] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Reconfigured VM instance instance-00000016 to attach disk [datastore1] f8c6f99f-499f-4886-aae9-5f08969175f6/f8c6f99f-499f-4886-aae9-5f08969175f6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.001250] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e959722f-8f6e-4a10-8614-8a2f686e517a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.013195] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1555.013195] env[62405]: value = "task-1946912" [ 1555.013195] env[62405]: _type = "Task" [ 1555.013195] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.021821] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946912, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.139618] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-6199de01-baca-4461-9572-111eda11adac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.140296] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6199de01-baca-4461-9572-111eda11adac] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 1555.140296] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.141028] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.141028] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1555.170151] env[62405]: DEBUG nova.scheduler.client.report [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1555.178453] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.288166] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08061} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.288166] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1555.289147] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36354b7b-83a4-4570-b7f8-dcbfb1396bbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.316778] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] b21dc1e7-dacd-4154-9bc3-0fa3774695a8/b21dc1e7-dacd-4154-9bc3-0fa3774695a8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1555.317153] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb4f0d15-53c5-4b4b-9bb0-642e90e3d44b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.332482] env[62405]: DEBUG nova.network.neutron [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Updating instance_info_cache with network_info: [{"id": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "address": "fa:16:3e:d6:b9:19", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": 
"tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677da2fd-a1", "ovs_interfaceid": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.343073] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1555.343073] env[62405]: value = "task-1946913" [ 1555.343073] env[62405]: _type = "Task" [ 1555.343073] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.353441] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946913, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.400840] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.400969] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.524529] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946912, 'name': Rename_Task, 'duration_secs': 0.15945} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.524891] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1555.525195] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f791d18-0771-4507-9356-019dc82aab47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.533524] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1555.533524] env[62405]: value = "task-1946914" [ 1555.533524] env[62405]: _type = "Task" [ 1555.533524] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.542664] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946914, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.680884] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.681632] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1555.689226] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.733s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.689489] env[62405]: DEBUG nova.objects.instance [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lazy-loading 'resources' on Instance uuid 02abae6c-8962-49eb-8fa9-36b13a20eff1 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.709230] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Successfully updated port: 62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1555.836366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.836798] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance network_info: |[{"id": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "address": "fa:16:3e:d6:b9:19", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677da2fd-a1", "ovs_interfaceid": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1555.837287] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:b9:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '677da2fd-a16d-4c43-b074-8aee4d0abe46', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1555.845687] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating folder: Project (bb1da47e8b1a400fab7817d9e6b282ed). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1555.846067] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3489b31-baba-47c0-a1fd-05ce862e1f54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.857844] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.904830] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.046134] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946914, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.048331] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created folder: Project (bb1da47e8b1a400fab7817d9e6b282ed) in parent group-v401284. [ 1556.048514] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating folder: Instances. Parent ref: group-v401359. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.048745] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc03c422-ec23-450f-ac78-810d792366bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.060260] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created folder: Instances in parent group-v401359. [ 1556.060260] env[62405]: DEBUG oslo.service.loopingcall [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.060397] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.060600] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57190b2c-9e53-491f-941f-c0deb8acd88c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.080968] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.080968] env[62405]: value = "task-1946917" [ 1556.080968] env[62405]: _type = "Task" [ 1556.080968] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.089483] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946917, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.191559] env[62405]: DEBUG nova.compute.utils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1556.196655] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1556.196934] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1556.212727] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.212904] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.213095] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1556.250619] env[62405]: DEBUG nova.policy [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Policy check 
for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ad5e220132245168b59ff3df599b974', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3b50cc219314108945bfc8b2c21849a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1556.359100] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946913, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.552738] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946914, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.595798] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946917, 'name': CreateVM_Task, 'duration_secs': 0.430391} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.595798] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1556.596605] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Successfully created port: 7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1556.599182] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.599365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.599734] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1556.599998] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e3d5285-b152-42f3-b164-f80531548c32 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.607181] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1556.607181] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e25b88-51b2-730c-f548-233b6f8faa90" [ 1556.607181] env[62405]: _type = "Task" [ 1556.607181] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.620861] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e25b88-51b2-730c-f548-233b6f8faa90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.703258] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1556.749142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35108a89-4989-4c27-92a9-816ebe4ef8db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.759660] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1556.762557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9b8b1e-2ecc-444a-b043-e2a9da305a4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.798557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396ede33-79e2-40fc-931a-540a40d60907 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.807711] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e249784-19da-4a5f-aca8-e1d05412c411 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.822762] env[62405]: DEBUG nova.compute.provider_tree [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.856801] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946913, 'name': ReconfigVM_Task, 'duration_secs': 1.234496} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.859645] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Reconfigured VM instance instance-00000017 to attach disk [datastore1] b21dc1e7-dacd-4154-9bc3-0fa3774695a8/b21dc1e7-dacd-4154-9bc3-0fa3774695a8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1556.860438] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02c7ea99-b72f-4619-a15e-60d90aaf7d15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.869555] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1556.869555] env[62405]: value = "task-1946918" [ 1556.869555] env[62405]: _type = "Task" [ 1556.869555] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.880908] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946918, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.938023] env[62405]: DEBUG nova.network.neutron [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [{"id": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "address": "fa:16:3e:34:fe:9b", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62da0bb8-4a", "ovs_interfaceid": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.046749] env[62405]: DEBUG oslo_vmware.api [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1946914, 'name': PowerOnVM_Task, 'duration_secs': 1.14274} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.046965] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1557.047186] env[62405]: INFO nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Took 14.12 seconds to spawn the instance on the hypervisor. 
[ 1557.047373] env[62405]: DEBUG nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1557.048167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbdc995-17fe-4eed-a78f-b48f05a59584 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.121887] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e25b88-51b2-730c-f548-233b6f8faa90, 'name': SearchDatastore_Task, 'duration_secs': 0.017538} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.122225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.122466] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.122774] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.122943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.123332] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.123456] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-071c0b02-d309-4dfc-a243-b985d571fe24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.134787] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.134975] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.135736] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0ff3154-72e6-4329-aa3b-acc58d6152ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.141904] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1557.141904] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5255df01-2a47-2d70-5a83-44f0f8c80fbf" [ 1557.141904] env[62405]: _type = "Task" [ 1557.141904] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.151486] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5255df01-2a47-2d70-5a83-44f0f8c80fbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.328905] env[62405]: DEBUG nova.scheduler.client.report [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1557.380849] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946918, 'name': Rename_Task, 'duration_secs': 0.235429} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.381151] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1557.381426] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03c7df68-ce65-42d8-be84-9a4003a6dbb6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.389926] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1557.389926] env[62405]: value = "task-1946919" [ 1557.389926] env[62405]: _type = "Task" [ 1557.389926] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.398338] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.440601] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.440974] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance network_info: |[{"id": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "address": "fa:16:3e:34:fe:9b", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62da0bb8-4a", "ovs_interfaceid": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1557.569344] env[62405]: INFO nova.compute.manager [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Took 44.32 seconds to build instance. [ 1557.654115] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5255df01-2a47-2d70-5a83-44f0f8c80fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.022861} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.654968] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-125c9119-0afb-4775-8858-0f361bad4f44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.661306] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1557.661306] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ceb095-06d5-7dee-25ac-ddffa1b6c436" [ 1557.661306] env[62405]: _type = "Task" [ 1557.661306] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.670182] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ceb095-06d5-7dee-25ac-ddffa1b6c436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.712826] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1557.834301] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.837930] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.800s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.837930] env[62405]: DEBUG nova.objects.instance [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lazy-loading 'resources' on Instance uuid 8995f9cb-8454-4a98-9090-290f87f8af18 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.868805] env[62405]: INFO nova.scheduler.client.report [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Deleted allocations for instance 02abae6c-8962-49eb-8fa9-36b13a20eff1 [ 1557.907129] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946919, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.071844] env[62405]: DEBUG oslo_concurrency.lockutils [None req-492957f0-fc9e-4ac3-8a9a-ab331bf6fc7e tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.922s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.162798] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Successfully updated port: 7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1558.176203] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ceb095-06d5-7dee-25ac-ddffa1b6c436, 'name': SearchDatastore_Task, 'duration_secs': 0.017811} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.176535] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.176856] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca4d11fe-1d0f-468b-a2f4-21c5b84342ab/ca4d11fe-1d0f-468b-a2f4-21c5b84342ab.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.177676] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d301a3d-68ee-4f6f-b098-a16e95fb1148 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.185030] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1558.185030] env[62405]: value = "task-1946920" [ 1558.185030] env[62405]: _type = "Task" [ 1558.185030] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.195982] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.379373] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7ffb043-2765-4989-b831-725982cf077a tempest-ServerExternalEventsTest-1509112354 tempest-ServerExternalEventsTest-1509112354-project-member] Lock "02abae6c-8962-49eb-8fa9-36b13a20eff1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.558s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.400685] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946919, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.404504] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1558.404504] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1558.404504] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1558.404745] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1558.404745] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1558.404745] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1558.404745] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1558.404745] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1558.404875] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1558.404875] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1558.404875] env[62405]: DEBUG nova.virt.hardware [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1558.409111] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc24058-0a2f-428d-ac1b-71387e6d0807 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.418261] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1558.418553] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1558.418714] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1558.418892] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1558.419074] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1558.419245] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1558.419448] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1558.419605] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1558.419777] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1558.419928] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1558.420100] env[62405]: DEBUG nova.virt.hardware [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1558.421375] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec3e0b6-3016-4411-883c-78bd2e764979 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.427319] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33d2d25-91ed-4e71-b3fe-dd8af359162f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.435829] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1558.436692] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b264f0-f135-42f4-b28e-432e083de696 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.450198] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:fe:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08e9585e-6186-4788-9fd9-24174ce45a6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62da0bb8-4a2d-4e69-a4da-3970ca057cad', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.457936] env[62405]: DEBUG oslo.service.loopingcall [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.462421] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f9f225-b61e-4821-a844-ff09fb8ec1f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.466450] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.468274] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbfa934c-3328-4de3-87bb-6ed8562fd8a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.483177] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1558.483417] env[62405]: ERROR oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk due to incomplete transfer. [ 1558.486812] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2809edf5-75d3-47f6-971d-ed2d5a5d7735 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.501947] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.501947] env[62405]: value = "task-1946921" [ 1558.501947] env[62405]: _type = "Task" [ 1558.501947] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.503212] env[62405]: DEBUG oslo_vmware.rw_handles [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5240e3b0-8f4d-18fd-a617-f9080a8ab858/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1558.503436] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Uploaded image 47c64ab2-fad8-4839-a33b-53a897e09d15 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1558.505301] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1558.509885] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-802154c8-319c-45cf-a2a8-7ea92737d1b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.519426] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946921, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.521463] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1558.521463] env[62405]: value = "task-1946922" [ 1558.521463] env[62405]: _type = "Task" [ 1558.521463] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.537332] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.576323] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1558.670965] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.670965] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.671128] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1558.700034] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946920, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.903795] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946919, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.925282] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65244724-7888-4295-8231-de4f7418d2fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.935267] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f57e05a-af3f-4fda-99a6-f2532684c889 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.970859] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6eca44-4eb6-4c27-adaa-19c6e9ef9d82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.979411] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b532624-26d8-4573-9440-f885fc5cdf52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.993715] env[62405]: DEBUG nova.compute.provider_tree [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.013945] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946921, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.031785] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.103972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.203029] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766891} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.203029] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca4d11fe-1d0f-468b-a2f4-21c5b84342ab/ca4d11fe-1d0f-468b-a2f4-21c5b84342ab.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1559.203029] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1559.203029] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57e5b060-1166-4a10-8508-c011687cecb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.210727] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1559.210727] env[62405]: value = "task-1946923" [ 1559.210727] env[62405]: _type = "Task" [ 1559.210727] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.221595] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.222879] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1559.384078] env[62405]: DEBUG nova.network.neutron [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.404290] env[62405]: DEBUG oslo_vmware.api [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1946919, 'name': PowerOnVM_Task, 'duration_secs': 1.876464} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.404567] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.404788] env[62405]: INFO nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Took 12.22 seconds to spawn the instance on the hypervisor. 
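Editor's note: the recurring "Task: {'id': task-..., 'name': ...} progress is N%" records above and below come from oslo.vmware's wait-for-task polling (wait_for_task / _poll_task in oslo_vmware/api.py, as the location stamps show). The following is a minimal sketch of that polling pattern only, not the library's actual code; `get_task_info` and its `.state` / `.progress` / `.name` / `.error` fields are assumed stand-ins for the vCenter task info the session retrieves.

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls (assumed value for the sketch)


def wait_for_task(get_task_info, task_ref, log):
    """Poll a vCenter task until it finishes, emitting progress lines like
    the '_poll_task' DEBUG records in this log. `get_task_info` is an
    assumed helper returning an object with .state, .name, .progress and
    .error attributes."""
    while True:
        info = get_task_info(task_ref)
        if info.state in ("queued", "running"):
            log(f"Task: {{'id': {task_ref}, 'name': {info.name}}} "
                f"progress is {info.progress or 0}%.")
            time.sleep(POLL_INTERVAL)
            continue
        if info.state == "success":
            log(f"Task {task_ref} completed successfully.")
            return info
        # any other terminal state is treated as a failure
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")
```

In the log, the final poll of each task is the one that also reports a 'duration_secs' value once the task result comes back (e.g. CopyVirtualDisk_Task with 'duration_secs': 0.766891 above).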
[ 1559.404970] env[62405]: DEBUG nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.406098] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e9eb4c-bf94-42ca-93b7-35bb378f3087 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.496769] env[62405]: DEBUG nova.scheduler.client.report [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1559.514567] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946921, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.535317] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.559389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.559650] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.720507] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093259} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.720836] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1559.721682] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6861ed-6ab8-4338-ae27-9c2a76a75f85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.743772] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] ca4d11fe-1d0f-468b-a2f4-21c5b84342ab/ca4d11fe-1d0f-468b-a2f4-21c5b84342ab.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1559.744162] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9085e22-5c0d-48a1-9fe8-722341f88cb9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.764119] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1559.764119] env[62405]: value = "task-1946924" [ 1559.764119] env[62405]: _type = "Task" [ 1559.764119] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.772035] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946924, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.888873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.889263] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance network_info: |[{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1559.889695] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:14:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e786917-4e46-4359-899e-afc1456451ae', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1559.897451] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating folder: Project (f3b50cc219314108945bfc8b2c21849a). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.897803] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e28d991-6b63-4a3e-aa18-f707712acdc1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.908153] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created folder: Project (f3b50cc219314108945bfc8b2c21849a) in parent group-v401284. [ 1559.908551] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating folder: Instances. Parent ref: group-v401363. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.908660] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f1a96cc-c329-460c-a21e-b62139cf6191 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.920977] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created folder: Instances in parent group-v401363. [ 1559.921285] env[62405]: DEBUG oslo.service.loopingcall [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1559.923019] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1559.923624] env[62405]: INFO nova.compute.manager [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Took 46.50 seconds to build instance. [ 1559.924563] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c122a02-ba13-48bd-a905-6cb30362ffc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.941336] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6db8342a-f858-442a-9f6f-225e0b138a64 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.855s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.947533] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1559.947533] env[62405]: value = "task-1946927" [ 1559.947533] env[62405]: _type = "Task" [ 1559.947533] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.958222] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.991997] env[62405]: DEBUG nova.compute.manager [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received event network-vif-plugged-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1559.993484] env[62405]: DEBUG oslo_concurrency.lockutils [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.993484] env[62405]: DEBUG oslo_concurrency.lockutils [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.993484] env[62405]: DEBUG oslo_concurrency.lockutils [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.993484] env[62405]: DEBUG nova.compute.manager [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] No waiting events found dispatching network-vif-plugged-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1559.993484] env[62405]: WARNING nova.compute.manager [req-7152e257-22f0-436d-ae68-96bc9fe53c3e req-2de6f969-814c-4ba2-bdf3-978824ec8b3f service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received unexpected event network-vif-plugged-62da0bb8-4a2d-4e69-a4da-3970ca057cad for instance with vm_state building and task_state spawning. 
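Editor's note: the "network-vif-plugged" records above show Nova's external-event handling: the compute manager keeps a per-instance table of expected events, and a Neutron notification either releases a waiter or, if nothing is waiting yet, is logged as unexpected (the WARNING above, while the instance is still in vm_state building / task_state spawning). Below is a toy illustration of that dispatch idea using threading.Event and hypothetical names; it is not Nova's InstanceEvents implementation.

```python
import threading
from collections import defaultdict


class InstanceEventTable:
    """Toy model of the bookkeeping behind the 'pop_instance_event' /
    'No waiting events found dispatching ...' lines (illustrative only)."""

    def __init__(self):
        self._lock = threading.Lock()          # plays the "<uuid>-events" lock role
        # {instance_uuid: {event_name: threading.Event}}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register an event the spawning thread is about to wait for."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name, log):
        """Called when an external event such as
        network-vif-plugged-<port-id> arrives from Neutron."""
        with self._lock:
            waiter = self._waiters[instance_uuid].pop(event_name, None)
        if waiter is None:
            log(f"Received unexpected event {event_name} "
                f"for instance {instance_uuid}")
            return
        waiter.set()  # wake the thread that is plugging the VIF
```

A spawning thread would call prepare() before plugging the VIF and then wait on the returned Event; the WARNING in the log corresponds to the dispatch arriving before any waiter was registered, which the manager tolerates during spawn.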
[ 1560.003696] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.008222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.495s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.008222] env[62405]: INFO nova.compute.claims [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.020765] env[62405]: DEBUG nova.compute.manager [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Received event network-changed-677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1560.020765] env[62405]: DEBUG nova.compute.manager [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Refreshing instance network info cache due to event network-changed-677da2fd-a16d-4c43-b074-8aee4d0abe46. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1560.020849] env[62405]: DEBUG oslo_concurrency.lockutils [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] Acquiring lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.021105] env[62405]: DEBUG oslo_concurrency.lockutils [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] Acquired lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.021372] env[62405]: DEBUG nova.network.neutron [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Refreshing network info cache for port 677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1560.030227] env[62405]: INFO nova.scheduler.client.report [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted allocations for instance 8995f9cb-8454-4a98-9090-290f87f8af18 [ 1560.035459] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946921, 'name': CreateVM_Task, 'duration_secs': 1.487976} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.041018] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1560.041018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.041018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.041308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1560.045794] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681b293f-b00c-4e29-bf45-1368c57fcc9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.049960] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.052635] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1560.052635] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f22380-c9e8-05bd-8389-109f7a6cbec1" [ 1560.052635] env[62405]: _type = "Task" [ 1560.052635] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.061978] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f22380-c9e8-05bd-8389-109f7a6cbec1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.276132] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946924, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.444419] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1560.457657] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.537317] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.542866] env[62405]: DEBUG oslo_concurrency.lockutils [None req-df06cd55-95fe-4b37-813d-25bb8e23e856 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "8995f9cb-8454-4a98-9090-290f87f8af18" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.436s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.562608] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f22380-c9e8-05bd-8389-109f7a6cbec1, 'name': SearchDatastore_Task, 'duration_secs': 0.029103} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.563946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.564238] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1560.564532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.564691] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.564979] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.565431] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6988bab-c2b2-4f53-ae1c-dd86810fe5ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.574615] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.574844] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1560.575632] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eed86602-1e34-4bf7-bfcb-0f79346bdb1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.584968] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1560.584968] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247c8c8-9490-da5d-22ce-3dd3dcb1296f" [ 1560.584968] env[62405]: _type = "Task" [ 1560.584968] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.595874] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247c8c8-9490-da5d-22ce-3dd3dcb1296f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.783214] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946924, 'name': ReconfigVM_Task, 'duration_secs': 0.774032} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.783214] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Reconfigured VM instance instance-00000018 to attach disk [datastore1] ca4d11fe-1d0f-468b-a2f4-21c5b84342ab/ca4d11fe-1d0f-468b-a2f4-21c5b84342ab.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.783214] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-449233a1-ee39-4e13-b814-e89d29b9e628 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.792061] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1560.792061] env[62405]: value = "task-1946928" [ 1560.792061] env[62405]: _type = "Task" [ 1560.792061] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.801697] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946928, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.908393] env[62405]: DEBUG nova.network.neutron [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Updated VIF entry in instance network info cache for port 677da2fd-a16d-4c43-b074-8aee4d0abe46. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1560.910299] env[62405]: DEBUG nova.network.neutron [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Updating instance_info_cache with network_info: [{"id": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "address": "fa:16:3e:d6:b9:19", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap677da2fd-a1", "ovs_interfaceid": "677da2fd-a16d-4c43-b074-8aee4d0abe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.969662] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.988067] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.039523] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946922, 'name': Destroy_Task, 'duration_secs': 2.12579} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.040315] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Destroyed the VM [ 1561.040835] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1561.040835] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-12c54ffe-9b75-4003-833a-454cc8a6b62b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.048552] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1561.048552] env[62405]: value = "task-1946929" [ 1561.048552] env[62405]: _type = "Task" [ 1561.048552] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.057495] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946929, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.099849] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247c8c8-9490-da5d-22ce-3dd3dcb1296f, 'name': SearchDatastore_Task, 'duration_secs': 0.015111} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.100954] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2929bba1-f77e-416a-bccf-1db2fd868aa5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.108010] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1561.108010] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5254fc86-7bb7-bd95-49c2-549933b659e0" [ 1561.108010] env[62405]: _type = "Task" [ 1561.108010] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.118795] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5254fc86-7bb7-bd95-49c2-549933b659e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.301732] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946928, 'name': Rename_Task, 'duration_secs': 0.342728} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.304748] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1561.305454] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a85831e1-f9b3-4046-85b3-c72779a0bd84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.315024] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1561.315024] env[62405]: value = "task-1946930" [ 1561.315024] env[62405]: _type = "Task" [ 1561.315024] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.321738] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.414778] env[62405]: DEBUG oslo_concurrency.lockutils [req-b55847ef-9464-4077-8c6a-e67d137ea430 req-01338337-c0d7-421d-b532-08ea84909577 service nova] Releasing lock "refresh_cache-ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.464323] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.554375] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b904d4c5-c650-4505-910a-1ccd766df2e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.563039] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946929, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.566063] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50e8ff8-0971-4320-9553-09aab1aff442 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.603916] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ce8408-2352-44f0-93ec-c6ce895224d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.617976] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0b630e-c9a7-4de8-a4b4-744f44f525f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.625829] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5254fc86-7bb7-bd95-49c2-549933b659e0, 'name': SearchDatastore_Task, 'duration_secs': 0.018618} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.626658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.627240] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 65462c7a-372e-4ba6-8f6d-e300080d65d0/65462c7a-372e-4ba6-8f6d-e300080d65d0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1561.627537] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1434eb84-081c-44d6-98f7-6e2d51959b55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.639334] env[62405]: DEBUG nova.compute.provider_tree [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.649447] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1561.649447] env[62405]: value = "task-1946931" [ 1561.649447] env[62405]: _type = "Task" [ 1561.649447] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.655977] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946931, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.823267] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.971586] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.067023] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946929, 'name': RemoveSnapshot_Task} progress is 31%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.145993] env[62405]: DEBUG nova.scheduler.client.report [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1562.163988] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946931, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.328601] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.436738] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1562.437169] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing instance network info cache due to event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1562.437540] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquiring lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.437831] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquired lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.438159] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.469298] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.559354] env[62405]: DEBUG oslo_vmware.api [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946929, 'name': RemoveSnapshot_Task, 'duration_secs': 1.445454} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.559618] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1562.559847] env[62405]: INFO nova.compute.manager [None req-7b88e272-7c72-414d-8240-40a604e31ff5 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 25.28 seconds to snapshot the instance on the hypervisor. 
[ 1562.658524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.658943] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1562.663127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.929s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.664738] env[62405]: INFO nova.compute.claims [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.679756] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946931, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65838} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.680293] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 65462c7a-372e-4ba6-8f6d-e300080d65d0/65462c7a-372e-4ba6-8f6d-e300080d65d0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1562.680293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1562.680490] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae4e3af6-859a-4aeb-9af2-9d582dab506d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.688246] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1562.688246] env[62405]: value = "task-1946932" [ 1562.688246] env[62405]: _type = "Task" [ 1562.688246] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.705789] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946932, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.825749] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.974756] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946927, 'name': CreateVM_Task, 'duration_secs': 2.902548} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.975101] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1562.975594] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.975759] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.976106] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1562.976365] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f07256b-d4bb-4979-a7b5-2430d52823be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.982253] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1562.982253] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52843741-39dc-fa0f-61e2-63e0d1465e03" [ 1562.982253] env[62405]: _type = "Task" [ 1562.982253] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.992258] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52843741-39dc-fa0f-61e2-63e0d1465e03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.171139] env[62405]: DEBUG nova.compute.utils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.172534] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1563.172707] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1563.188025] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updated VIF entry in instance network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1563.189104] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [{"id": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "address": "fa:16:3e:34:fe:9b", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62da0bb8-4a", "ovs_interfaceid": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.201178] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946932, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094417} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.201470] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1563.202393] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1e56a5-803a-4d13-ae53-e38e802aaff0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.226022] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 65462c7a-372e-4ba6-8f6d-e300080d65d0/65462c7a-372e-4ba6-8f6d-e300080d65d0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1563.226660] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1277cffd-5e10-489b-bc1d-ad387bfe628d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.242163] env[62405]: DEBUG nova.policy [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ca334510b4445a23dc2fb38215590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a1645e38674042828c78155974f95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1563.249409] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1563.249409] env[62405]: value = "task-1946933" [ 1563.249409] env[62405]: _type = "Task" [ 1563.249409] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.259457] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946933, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.327474] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.492210] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52843741-39dc-fa0f-61e2-63e0d1465e03, 'name': SearchDatastore_Task, 'duration_secs': 0.018699} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.492471] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.492708] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1563.492945] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.493107] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.493292] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.493585] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1909879-7b1d-4061-908f-7eb3072ee29a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.503410] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.503636] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1563.504349] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c49d18d-2d85-4003-aef5-f45277445bbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.509390] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1563.509390] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dcfea-b792-184b-03c1-c729b2037fbd" [ 1563.509390] env[62405]: _type = "Task" [ 1563.509390] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.519469] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dcfea-b792-184b-03c1-c729b2037fbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.605256] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1563.609837] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6d1b1e-94ee-4200-833c-d0c23e2ad95e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.616970] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1563.617221] env[62405]: ERROR oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk due to incomplete transfer. [ 1563.617495] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ccb89864-14fa-47ce-97f5-4db17c0964cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.625564] env[62405]: DEBUG oslo_vmware.rw_handles [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5229654f-d0cb-7056-f5fd-3a52b755e9e8/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1563.625766] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Uploaded image cc576845-9d46-42b4-b1ee-897e638ba48c to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1563.627525] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1563.627787] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-52604899-0f95-467e-a655-0e7c07c9cf2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.635055] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1563.635055] env[62405]: value = "task-1946934" [ 1563.635055] env[62405]: _type = "Task" [ 1563.635055] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.643917] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946934, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.676790] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1563.691247] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Releasing lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.691528] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Received event network-vif-plugged-7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1563.691726] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.691937] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.692124] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.692285] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] No waiting events found dispatching network-vif-plugged-7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1563.692454] env[62405]: WARNING nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Received unexpected event network-vif-plugged-7e786917-4e46-4359-899e-afc1456451ae for instance with vm_state building and task_state spawning. [ 1563.692619] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Received event network-changed-7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1563.692773] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Refreshing instance network info cache due to event network-changed-7e786917-4e46-4359-899e-afc1456451ae. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1563.692957] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.693126] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.693264] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Refreshing network info cache for port 7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1563.762490] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946933, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.830581] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.879856] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Successfully created port: ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1564.021328] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dcfea-b792-184b-03c1-c729b2037fbd, 'name': SearchDatastore_Task, 'duration_secs': 0.021934} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.022154] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24716780-afa5-48f8-9e9c-4cda06f18b00 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.033932] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1564.033932] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526549b1-a8f8-afc0-838d-4ee42b5ce191" [ 1564.033932] env[62405]: _type = "Task" [ 1564.033932] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.041580] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526549b1-a8f8-afc0-838d-4ee42b5ce191, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.149398] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946934, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.263380] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946933, 'name': ReconfigVM_Task, 'duration_secs': 0.591059} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.267496] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 65462c7a-372e-4ba6-8f6d-e300080d65d0/65462c7a-372e-4ba6-8f6d-e300080d65d0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1564.269131] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-752d3e0c-4873-48c8-9361-bc857b458d68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.272138] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9427828-852a-44d1-904e-9549df056387 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.284594] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f6dd71-ad7e-415d-b3fb-c2e691334712 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.288612] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1564.288612] env[62405]: value = "task-1946935" [ 1564.288612] env[62405]: _type = "Task" [ 1564.288612] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.331725] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a157d299-8b1d-4902-b4bc-ca0fcc727969 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.343331] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946935, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.349863] env[62405]: DEBUG oslo_vmware.api [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946930, 'name': PowerOnVM_Task, 'duration_secs': 2.679491} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.352857] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1564.353108] env[62405]: INFO nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Took 12.18 seconds to spawn the instance on the hypervisor. [ 1564.353299] env[62405]: DEBUG nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1564.356126] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637e54e6-0e3d-4f2c-bae8-06dc2d9289a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.359574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7ef25e-8153-427d-9a99-fed07ce067d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.379194] env[62405]: DEBUG nova.compute.provider_tree [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.546256] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526549b1-a8f8-afc0-838d-4ee42b5ce191, 'name': SearchDatastore_Task, 'duration_secs': 0.05812} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.546502] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.547499] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.547499] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a55052f6-9628-4839-8c7c-44f3c8c217cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.555061] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1564.555061] env[62405]: value = "task-1946936" [ 1564.555061] env[62405]: _type = "Task" [ 1564.555061] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.566555] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.591800] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updated VIF entry in instance network info cache for port 7e786917-4e46-4359-899e-afc1456451ae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1564.592321] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.646753] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946934, 'name': Destroy_Task, 'duration_secs': 0.95243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.647233] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Destroyed the VM [ 1564.647717] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1564.647932] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-44960762-3edd-422e-bbf9-734aa837a823 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.658608] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1564.658608] env[62405]: value = "task-1946937" [ 1564.658608] env[62405]: _type = "Task" [ 1564.658608] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.667386] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946937, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.698269] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1564.726745] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1564.727081] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.727327] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1564.727624] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.727799] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1564.728056] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1564.728304] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1564.728480] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1564.728657] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1564.728836] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1564.729028] env[62405]: DEBUG nova.virt.hardware [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1564.729956] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bbdf43-184b-4a5f-8611-adb0eedcfbe5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.738624] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27347bb5-ddf0-4107-9ee9-834064651eb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.798793] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946935, 'name': Rename_Task, 'duration_secs': 0.22629} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.799174] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1564.799435] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-400b9555-cb95-455f-99d9-1a19ef0e928d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.805976] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1564.805976] env[62405]: value = "task-1946938" [ 1564.805976] env[62405]: _type = "Task" [ 1564.805976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.814454] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946938, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.885166] env[62405]: DEBUG nova.scheduler.client.report [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1564.897323] env[62405]: INFO nova.compute.manager [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Took 50.54 seconds to build instance. [ 1565.069804] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946936, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.095929] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1565.095929] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Received event network-changed-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1565.098097] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Refreshing instance network info cache due to event network-changed-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1565.098097] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquiring lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1565.098097] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquired lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.098097] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Refreshing network info cache for port 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1565.172271] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946937, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.321424] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946938, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.352477] env[62405]: DEBUG nova.compute.manager [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.353362] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590ad31f-aa02-4e78-88b2-f02ae593b15e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.390903] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.391358] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1565.397049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.897s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.397049] env[62405]: DEBUG nova.objects.instance [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lazy-loading 'resources' on Instance uuid 6199de01-baca-4461-9572-111eda11adac {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.400969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84245c7e-e68b-4a25-93c3-16b1318e6fa1 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.367s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.567387] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685827} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.567670] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1565.567886] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1565.568149] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44bcef56-703d-45fa-9ce3-39d7191d6afb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.575706] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1565.575706] env[62405]: value = "task-1946939" [ 1565.575706] env[62405]: _type = "Task" [ 1565.575706] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.584194] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.671635] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946937, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.678110] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Successfully updated port: ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1565.688785] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.690111] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.690111] env[62405]: DEBUG nova.compute.manager [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.691663] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8998273-38e5-40a2-a412-8154da3e354f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.700488] env[62405]: DEBUG nova.compute.manager [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1565.700488] env[62405]: DEBUG nova.objects.instance [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lazy-loading 'flavor' on Instance uuid ca4d11fe-1d0f-468b-a2f4-21c5b84342ab {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.746175] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.746674] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.746674] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.746863] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.747117] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.749259] env[62405]: INFO nova.compute.manager [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Terminating instance [ 1565.823634] env[62405]: DEBUG oslo_vmware.api [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946938, 'name': PowerOnVM_Task, 'duration_secs': 0.536067} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.823923] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.824161] env[62405]: INFO nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 10.89 seconds to spawn the instance on the hypervisor. 
[ 1565.824344] env[62405]: DEBUG nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.825314] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ae4df2-de4c-482d-b38d-1e8e6511a2d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.864609] env[62405]: INFO nova.compute.manager [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] instance snapshotting [ 1565.867532] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3005cbc2-025d-4855-a0b0-cfa232ebb6ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.889907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c462626-5fcf-481d-b1b0-823ab64300fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.900742] env[62405]: DEBUG nova.compute.utils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.906513] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1565.906791] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.910074] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1565.956902] env[62405]: DEBUG nova.policy [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfd7797f3fbb42b7a4d9dae229c00f7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c96e0244edf49db9cd520b5e359fc87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1566.091265] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140535} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.091486] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1566.092152] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30605b23-546f-4e81-97cc-6d703a3ecbce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.098985] env[62405]: DEBUG nova.compute.manager [req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Received event network-vif-plugged-ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1566.099215] env[62405]: DEBUG oslo_concurrency.lockutils [req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] Acquiring lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.099422] env[62405]: DEBUG oslo_concurrency.lockutils [req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.099591] env[62405]: DEBUG oslo_concurrency.lockutils [req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.099761] env[62405]: DEBUG nova.compute.manager 
[req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] No waiting events found dispatching network-vif-plugged-ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1566.099919] env[62405]: WARNING nova.compute.manager [req-8f8d9db8-fcc9-4d87-a4bc-ffe907313e1b req-d64b8c60-9c02-464f-b3e4-2c2dd182d1ed service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Received unexpected event network-vif-plugged-ce532b3f-30ef-4d32-b533-7a04d491a6d4 for instance with vm_state building and task_state spawning. [ 1566.122807] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1566.126297] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updated VIF entry in instance network info cache for port 1b9bb4b7-58d5-4182-ad5b-0a10e3a34546. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1566.126766] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updating instance_info_cache with network_info: [{"id": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "address": "fa:16:3e:39:9b:cb", "network": {"id": "1df319a9-509c-4707-ad01-fc460b61420b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-2109870983-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d7f990209644a10a5b12c49517e0196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b9bb4b7-58", "ovs_interfaceid": "1b9bb4b7-58d5-4182-ad5b-0a10e3a34546", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.128467] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7354da0f-680c-438d-b984-8cd3c258ca8b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.149633] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1566.149633] env[62405]: value = "task-1946940" [ 1566.149633] env[62405]: _type = "Task" [ 1566.149633] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.161558] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946940, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.175169] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946937, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.181110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.181259] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.181416] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1566.255024] env[62405]: DEBUG nova.compute.manager [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1566.255024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1566.256670] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6966839d-2423-4d83-a16e-aa9159c5dba7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.268154] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.269034] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc5c289e-bf13-4a86-9d4b-1740db5e83d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.276651] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1566.276651] env[62405]: value = "task-1946941" [ 1566.276651] env[62405]: _type = "Task" [ 1566.276651] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.288904] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946941, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.347364] env[62405]: INFO nova.compute.manager [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 50.24 seconds to build instance. [ 1566.411367] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1566.413452] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1566.416048] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-79da691b-8778-4583-9857-18b99973215e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.425795] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.426089] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.426129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.426333] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.426494] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.426641] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.426806] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.433232] env[62405]: INFO nova.compute.manager [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Terminating instance [ 1566.438221] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1566.438221] env[62405]: value = "task-1946942" [ 1566.438221] env[62405]: _type = "Task" [ 1566.438221] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.449642] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946942, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.455954] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.498153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.498462] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.507210] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Successfully created port: b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.564103] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c97c0-2f61-4fd4-8862-943d383d7f3b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.574172] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58d2dbf-2305-4e9d-bd89-64c14cb1b651 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.610922] env[62405]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58050c5-592d-456b-916d-77837c6eef1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.621290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e89e558-dab3-42d9-bbac-51a2b953a09e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.642955] env[62405]: DEBUG nova.compute.provider_tree [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.645522] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Releasing lock "refresh_cache-f8c6f99f-499f-4886-aae9-5f08969175f6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.645945] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Received event network-changed-e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1566.646293] env[62405]: DEBUG nova.compute.manager [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Refreshing instance network info cache due to event network-changed-e3b36820-3fc9-4b42-820d-9018b302c322. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1566.646648] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquiring lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.646917] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Acquired lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.647216] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Refreshing network info cache for port e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1566.665916] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946940, 'name': ReconfigVM_Task, 'duration_secs': 0.511203} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.669239] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1566.670200] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71695893-4e70-4f1c-971c-49053dfd8563 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.676480] env[62405]: DEBUG oslo_vmware.api [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946937, 'name': RemoveSnapshot_Task, 'duration_secs': 1.890177} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.677964] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1566.678218] env[62405]: INFO nova.compute.manager [None req-bd5321f9-66d9-41b1-b9cb-dd9e8b04008c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 23.70 seconds to snapshot the instance on the hypervisor. [ 1566.680591] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1566.680591] env[62405]: value = "task-1946943" [ 1566.680591] env[62405]: _type = "Task" [ 1566.680591] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.690786] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946943, 'name': Rename_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.709125] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.709461] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d717a1b-cafe-4cda-b39b-7ee3aaffeda4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.716452] env[62405]: DEBUG oslo_vmware.api [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1566.716452] env[62405]: value = "task-1946944" [ 1566.716452] env[62405]: _type = "Task" [ 1566.716452] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.727792] env[62405]: DEBUG oslo_vmware.api [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946944, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.729664] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1566.787532] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946941, 'name': PowerOffVM_Task, 'duration_secs': 0.352242} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.787799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1566.787968] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1566.788242] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd448c11-0f11-471a-8148-9257761f83b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.849569] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bcd7a81a-623c-4ebb-8572-514c77f8bc72 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.925s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.891166] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1566.891427] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1566.891659] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleting the datastore file [datastore1] ca0aca02-4b99-4393-900c-b9cb0dad55c7 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1566.892691] env[62405]: DEBUG nova.network.neutron [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Updating instance_info_cache with network_info: [{"id": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "address": "fa:16:3e:a2:aa:a6", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce532b3f-30", "ovs_interfaceid": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.893886] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3a83280-668b-4e0e-aea6-8b8840ee49ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.901250] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1566.901250] env[62405]: value = "task-1946946" [ 1566.901250] env[62405]: _type = "Task" [ 1566.901250] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.910224] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.925748] env[62405]: INFO nova.virt.block_device [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Booting with volume 2c06d022-a782-4194-9dee-348bf3888516 at /dev/sda [ 1566.938737] env[62405]: DEBUG nova.compute.manager [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1566.938954] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1566.939857] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a59b93-7331-4bed-81b2-bfe04bbde2a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.961915] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.962252] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946942, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.962478] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-384304f5-a6fe-42e0-85dd-f241abb3f3d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.970034] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1566.970034] env[62405]: value = "task-1946947" [ 1566.970034] env[62405]: _type = "Task" [ 1566.970034] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.975687] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b4c24f3-a9e3-4bb8-94ee-fd5b4043788d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.988566] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c107eb7a-6cc4-41dc-8df5-a402d67c7d83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.001521] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946947, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.029388] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db54bc01-a9a7-4a38-830c-a5b7fdec9d0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.040498] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b878f2-d183-4414-9dd1-1366044d13c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.080794] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5314756d-41ea-418d-a15c-d777d50866a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.088981] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dacf8d2-1dd4-4f27-aab8-a4b6c184da8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.104775] env[62405]: DEBUG nova.virt.block_device [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updating existing volume attachment record: d1ad655c-79c9-412c-b6a2-5efcd198f813 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1567.150812] env[62405]: DEBUG nova.scheduler.client.report [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1567.196235] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946943, 'name': Rename_Task, 'duration_secs': 0.165851} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.196543] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1567.196809] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f012200-ae57-4943-84ed-f74f379c670a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.205510] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1567.205510] env[62405]: value = "task-1946948" [ 1567.205510] env[62405]: _type = "Task" [ 1567.205510] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.213954] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.225093] env[62405]: DEBUG oslo_vmware.api [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946944, 'name': PowerOffVM_Task, 'duration_secs': 0.188976} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.225457] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.225571] env[62405]: DEBUG nova.compute.manager [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1567.226332] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7749caeb-7628-4d5d-b989-6776542d890b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.356054] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1567.397774] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.398046] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance network_info: |[{"id": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "address": "fa:16:3e:a2:aa:a6", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce532b3f-30", "ovs_interfaceid": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1567.398497] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:aa:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce532b3f-30ef-4d32-b533-7a04d491a6d4', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1567.407737] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating folder: Project (f1a1645e38674042828c78155974f95e). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1567.408868] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-092216b5-89e9-483f-b9db-c6616ae7bc70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.414609] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updated VIF entry in instance network info cache for port e3b36820-3fc9-4b42-820d-9018b302c322. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.414942] env[62405]: DEBUG nova.network.neutron [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updating instance_info_cache with network_info: [{"id": "e3b36820-3fc9-4b42-820d-9018b302c322", "address": "fa:16:3e:a3:2a:5c", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3b36820-3f", "ovs_interfaceid": "e3b36820-3fc9-4b42-820d-9018b302c322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.423650] env[62405]: DEBUG oslo_vmware.api [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217045} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.423650] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1567.424137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1567.424137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1567.424137] env[62405]: INFO nova.compute.manager [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1567.424286] env[62405]: DEBUG oslo.service.loopingcall [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.425475] env[62405]: DEBUG nova.compute.manager [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1567.425583] env[62405]: DEBUG nova.network.neutron [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1567.427294] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created folder: Project (f1a1645e38674042828c78155974f95e) in parent group-v401284. [ 1567.427473] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating folder: Instances. Parent ref: group-v401367. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1567.427719] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35f769b7-e277-4962-9744-ced668c406d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.444821] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created folder: Instances in parent group-v401367. 
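The records above trace the vmwareapi driver's folder and VM creation path: each step is a vSphere SOAP call (Folder.CreateFolder, Folder.CreateVM_Task, FileManager.DeleteDatastoreFile_Task) issued through an oslo.vmware session, and calls that return a vCenter task are polled until they finish, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. The sketch below illustrates that call pattern with oslo.vmware's public session API; it is not Nova's actual code — the host, credentials, folder names and the run_task helper are placeholders, and the abbreviated constructor arguments may vary between oslo.vmware releases.

```python
# Minimal sketch (not Nova's actual code) of the call pattern visible in the
# log: SOAP methods are issued through an oslo.vmware session, and *_Task
# methods return a vCenter task reference that is polled until completion.
# Host, credentials and names are placeholders.
from oslo_vmware import api


def run_task(session, managed_object, method, **kwargs):
    """Invoke a *_Task SOAP method and block until the vCenter task finishes."""
    task_ref = session.invoke_api(session.vim, method, managed_object, **kwargs)
    return session.wait_for_task(task_ref)


session = api.VMwareAPISession(
    'vc.example.org',         # vCenter host (placeholder)
    'administrator@vsphere',  # username (placeholder)
    'secret',                 # password (placeholder)
    10,                       # api_retry_count: retries on transient faults
    0.5)                      # task_poll_interval: seconds between task polls

root_folder = session.vim.service_content.rootFolder

# CreateFolder is synchronous and returns the new folder reference directly,
# mirroring the "Created folder: Project (...)" / "Created folder: Instances"
# records above; task-returning methods would go through run_task() instead.
project_folder = session.invoke_api(
    session.vim, 'CreateFolder', root_folder, name='Project (demo-tenant)')
instances_folder = session.invoke_api(
    session.vim, 'CreateFolder', project_folder, name='Instances')
```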
[ 1567.445095] env[62405]: DEBUG oslo.service.loopingcall [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.445613] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1567.445828] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e24ad0c-f182-40ee-a719-ff4b942accc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.468009] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946942, 'name': CreateSnapshot_Task, 'duration_secs': 0.873898} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.468728] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1567.469522] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d0f285-48bc-4127-9124-4e916aed79c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.473467] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1567.473467] env[62405]: value = "task-1946951" [ 1567.473467] env[62405]: _type = "Task" [ 1567.473467] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.492027] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946947, 'name': PowerOffVM_Task, 'duration_secs': 0.16349} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.492027] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.492027] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1567.492027] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-655317ea-770f-41b9-a57e-fcc845a7e199 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.496854] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946951, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.584131] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1567.584381] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1567.584586] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleting the datastore file [datastore1] a1d35009-ea11-4e64-bbe4-604ed39d08f4 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.584856] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86acc4c4-a6a2-4a06-9ae5-e99f91fe5064 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.591630] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for the task: (returnval){ [ 1567.591630] env[62405]: value = "task-1946953" [ 1567.591630] env[62405]: _type = "Task" [ 1567.591630] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.601894] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.662823] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.667179] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.064s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.667179] env[62405]: DEBUG nova.objects.instance [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lazy-loading 'resources' on Instance uuid 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1567.709019] env[62405]: INFO nova.scheduler.client.report [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Deleted allocations for instance 6199de01-baca-4461-9572-111eda11adac [ 1567.723982] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946948, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.739291] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0aecbc13-26c7-479c-b021-1231aaf03970 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.881765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.917155] env[62405]: DEBUG oslo_concurrency.lockutils [req-353c5b7b-b4ff-4e6e-9bda-603290e7d02c req-df1f1086-b366-4d5e-b0db-6e83440a76c2 service nova] Releasing lock "refresh_cache-b21dc1e7-dacd-4154-9bc3-0fa3774695a8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.994716] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1567.994716] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946951, 'name': CreateVM_Task, 'duration_secs': 0.435491} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.994716] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-633f932f-6127-4ef0-acee-09f23584990f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.997148] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1567.998056] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.998300] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.998701] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1567.999386] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8aa20077-8559-4908-a67b-5ff288a3a34f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.005491] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1568.005491] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266a317-43be-0455-2010-c306e5a6ab06" [ 1568.005491] env[62405]: _type = "Task" [ 1568.005491] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.006145] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1568.006145] env[62405]: value = "task-1946954" [ 1568.006145] env[62405]: _type = "Task" [ 1568.006145] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.018094] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.021879] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266a317-43be-0455-2010-c306e5a6ab06, 'name': SearchDatastore_Task, 'duration_secs': 0.011523} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.022310] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.022425] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1568.022650] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.022790] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.022959] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1568.023230] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd4b630d-cb1a-4266-9de8-1df8e3d57d4f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.034231] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1568.034461] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1568.035711] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c3a488a-d972-4aea-bb69-b26229f7ec15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.042724] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1568.042724] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5202f8f5-b9ac-3ebc-ca9a-1d3f0b48b4a7" [ 1568.042724] env[62405]: _type = "Task" [ 1568.042724] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.051375] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5202f8f5-b9ac-3ebc-ca9a-1d3f0b48b4a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.103185] env[62405]: DEBUG oslo_vmware.api [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Task: {'id': task-1946953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176055} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.103550] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1568.103758] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1568.103954] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1568.104194] env[62405]: INFO nova.compute.manager [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1568.104465] env[62405]: DEBUG oslo.service.loopingcall [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.104692] env[62405]: DEBUG nova.compute.manager [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1568.104784] env[62405]: DEBUG nova.network.neutron [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1568.207562] env[62405]: DEBUG nova.network.neutron [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.225211] env[62405]: DEBUG oslo_concurrency.lockutils [None req-47ff8dfd-ab05-4f7c-b87a-2b9d28fef598 tempest-ServerTagsTestJSON-530814057 tempest-ServerTagsTestJSON-530814057-project-member] Lock "6199de01-baca-4461-9572-111eda11adac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.660s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.233718] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946948, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.520435] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.555930] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5202f8f5-b9ac-3ebc-ca9a-1d3f0b48b4a7, 'name': SearchDatastore_Task, 'duration_secs': 0.028085} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.562697] env[62405]: DEBUG nova.compute.manager [req-969d388f-9a6d-4631-9a25-a95c85802179 req-7d9c84a0-736f-4c33-86a3-76ef311c2f1b service nova] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Received event network-vif-deleted-2df3353e-cc22-401d-ba57-099a6e08d7e7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1568.563752] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c393dc-6e11-4930-b60e-21189577764b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.573038] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1568.573038] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52286427-13e0-dad8-05f4-1f5f6ac7fa03" [ 1568.573038] env[62405]: _type = "Task" [ 1568.573038] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.582589] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52286427-13e0-dad8-05f4-1f5f6ac7fa03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.723089] env[62405]: INFO nova.compute.manager [-] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Took 1.30 seconds to deallocate network for instance. [ 1568.723693] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946948, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.792863] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ae6d35-ced4-4016-9bc8-0867d96ad37f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.803337] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88ac594-60a4-4245-b83b-3b9d293a5544 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.837512] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059a3979-508e-49b5-b7f3-412f03eda21d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.846478] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db71efa4-8ff7-4057-9681-7849219f3c4b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.866020] env[62405]: DEBUG nova.compute.provider_tree [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.935731] env[62405]: DEBUG nova.network.neutron [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.022347] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.026118] env[62405]: DEBUG nova.compute.manager [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Received event network-changed-ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1569.026365] env[62405]: DEBUG nova.compute.manager [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Refreshing instance network info cache due to event network-changed-ce532b3f-30ef-4d32-b533-7a04d491a6d4. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1569.026630] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Acquiring lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.026813] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Acquired lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.027023] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Refreshing network info cache for port ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1569.082129] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52286427-13e0-dad8-05f4-1f5f6ac7fa03, 'name': SearchDatastore_Task, 'duration_secs': 0.01143} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.082349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.082614] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1569.082883] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adea560b-c2b5-4d46-8083-c199e9b75c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.090735] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1569.090735] env[62405]: value = "task-1946955" [ 1569.090735] env[62405]: _type = "Task" [ 1569.090735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.099865] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946955, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.120166] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Successfully updated port: b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1569.219732] env[62405]: DEBUG oslo_vmware.api [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1946948, 'name': PowerOnVM_Task, 'duration_secs': 1.537215} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.219732] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.219732] env[62405]: INFO nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 11.51 seconds to spawn the instance on the hypervisor. [ 1569.220254] env[62405]: DEBUG nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1569.221046] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902e1fe9-7147-480e-a4aa-9ce32fc58dc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.236860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.264724] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1569.265327] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1569.265558] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.265766] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1569.266015] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.266201] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1569.266407] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1569.266628] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1569.266786] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1569.266960] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Got 1 
possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1569.267140] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1569.267780] env[62405]: DEBUG nova.virt.hardware [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1569.268744] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a7b1fe-692d-46b8-8a11-72e263c2a0e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.277804] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd86071-5441-4200-b439-b6e64c50c073 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.369327] env[62405]: DEBUG nova.scheduler.client.report [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1569.439502] env[62405]: INFO nova.compute.manager [-] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Took 1.33 seconds to deallocate network for instance. [ 1569.522916] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.602037] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946955, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.625256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.625454] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquired lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.625593] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.744991] env[62405]: INFO nova.compute.manager [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 51.45 seconds to build instance. [ 1569.876376] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.881685] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.402s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.884028] env[62405]: INFO nova.compute.claims [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.915869] env[62405]: INFO nova.scheduler.client.report [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Deleted allocations for instance 0eec4a5f-9f9b-4a86-a046-2e2d107adc48 [ 1569.946660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.023050] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 
tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.106670] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946955, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.166863] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.248422] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b7b436c9-efbd-4790-9d5f-74a35908a712 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.760s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.350532] env[62405]: DEBUG nova.network.neutron [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updating instance_info_cache with network_info: [{"id": "b5922da6-f3d2-478a-8756-ea7020186366", "address": "fa:16:3e:2e:48:34", "network": {"id": "8c716b29-2304-47f6-8885-183e7c81bda2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-73064672-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c96e0244edf49db9cd520b5e359fc87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5922da6-f3", "ovs_interfaceid": "b5922da6-f3d2-478a-8756-ea7020186366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.387478] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Updated VIF entry in instance network info cache for port ce532b3f-30ef-4d32-b533-7a04d491a6d4. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.387999] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Updating instance_info_cache with network_info: [{"id": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "address": "fa:16:3e:a2:aa:a6", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce532b3f-30", "ovs_interfaceid": "ce532b3f-30ef-4d32-b533-7a04d491a6d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.426810] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f5ea0adf-5d7b-453b-be2f-0de070bac2fe tempest-AttachInterfacesV270Test-2142249887 tempest-AttachInterfacesV270Test-2142249887-project-member] Lock "0eec4a5f-9f9b-4a86-a046-2e2d107adc48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.070s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.525028] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.607645] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946955, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.1262} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.608100] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1570.608241] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1570.609030] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e59444b2-9654-4568-950a-50f566fbcce2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.615684] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1570.615684] env[62405]: value = "task-1946956" [ 1570.615684] env[62405]: _type = "Task" [ 1570.615684] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.624577] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946956, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.752187] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1570.855898] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Releasing lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.858517] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance network_info: |[{"id": "b5922da6-f3d2-478a-8756-ea7020186366", "address": "fa:16:3e:2e:48:34", "network": {"id": "8c716b29-2304-47f6-8885-183e7c81bda2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-73064672-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c96e0244edf49db9cd520b5e359fc87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5922da6-f3", "ovs_interfaceid": "b5922da6-f3d2-478a-8756-ea7020186366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1570.858728] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:48:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9643129c-1d95-4422-9df1-2c21289bd5d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5922da6-f3d2-478a-8756-ea7020186366', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1570.868235] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Creating folder: Project (3c96e0244edf49db9cd520b5e359fc87). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1570.868235] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03e10596-0099-4a34-a898-9adad809210c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.882989] env[62405]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
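The recurring "Acquiring lock … by …", "Lock … acquired … :: waited N.NNNs" and "… "released" … :: held N.NNNs" DEBUG lines in this log are emitted by oslo.concurrency's lockutils wrapper, which Nova places around build, cache-refresh and resource-claim critical sections. A minimal sketch of the calling pattern that produces them, assuming only oslo.concurrency; the prefix, lock name and function below are illustrative, not Nova's exact code:

# Hedged sketch: the decorator logs how long the caller waited to acquire the
# named lock and how long it held it, which is where figures such as
# "waited 30.312s" or "held 63.760s" in this log come from.
from oslo_concurrency import lockutils

# Nova-style convenience wrapper; the 'nova-' prefix is illustrative.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(context, instance, node):
    # Runs with the "compute_resources" lock held; concurrent builds queue up
    # here, so the reported wait time grows when many instances build at once.
    ...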
[ 1570.886177] env[62405]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62405) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1570.886177] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Folder already exists: Project (3c96e0244edf49db9cd520b5e359fc87). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1570.886177] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Creating folder: Instances. Parent ref: group-v401319. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1570.886177] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9212a4d-30ca-4b8c-828b-2f59f512e0ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.893088] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Releasing lock "refresh_cache-e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.895038] env[62405]: DEBUG nova.compute.manager [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-changed-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1570.895038] env[62405]: DEBUG nova.compute.manager [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing instance network info cache due to event network-changed-0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1570.895038] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.895038] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.895038] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing network info cache for port 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1570.896944] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Created folder: Instances in parent group-v401319. 
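The sequence just above (a WARNING that the web service reported a SOAP fault with HTTP status 200, a fault list of [DuplicateName], then "Folder already exists" followed by "Created folder: Instances") is the normal outcome when two builds race to create the same project folder: oslo.vmware converts the vCenter fault into a Python exception, and the driver treats it as success and reuses the existing folder. A minimal sketch of that pattern, assuming oslo.vmware's DuplicateName exception class; the lookup helper is hypothetical:

# Illustrative sketch (not Nova's exact code) of folder creation tolerating a
# concurrent creator, matching the DuplicateName sequence logged above.
from oslo_vmware import exceptions as vexc

def get_or_create_folder(session, parent_ref, name):
    try:
        # CreateFolder returns the managed object reference of the new folder.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # Another request created it first (the fault reported in the log);
        # look up the existing child folder instead of failing the build.
        return find_child_folder(session, parent_ref, name)  # hypothetical helper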
[ 1570.897238] env[62405]: DEBUG oslo.service.loopingcall [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1570.900564] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1570.901934] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba326cdc-9383-4a6d-8781-59eb67e64836 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.924148] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1570.924148] env[62405]: value = "task-1946959" [ 1570.924148] env[62405]: _type = "Task" [ 1570.924148] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.931490] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946959, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.028390] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946954, 'name': CloneVM_Task, 'duration_secs': 2.679172} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.031276] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Created linked-clone VM from snapshot [ 1571.032792] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c2472e-78c4-42fb-8f07-14ca57aa15aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.040212] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Uploading image dc99f2b6-e635-457f-9283-66df59ba98ab {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1571.072345] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1571.072345] env[62405]: value = "vm-401370" [ 1571.072345] env[62405]: _type = "VirtualMachine" [ 1571.072345] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1571.072345] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6185c6c6-5806-4287-b558-d3fcb2d67f65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.079555] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease: (returnval){ [ 1571.079555] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5223f1a5-1219-a61e-f4d2-825574cc52c5" [ 1571.079555] env[62405]: _type = "HttpNfcLease" [ 1571.079555] env[62405]: } obtained for exporting VM: (result){ [ 1571.079555] env[62405]: value = "vm-401370" [ 1571.079555] env[62405]: _type = "VirtualMachine" [ 1571.079555] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1571.079809] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the lease: (returnval){ [ 1571.079809] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5223f1a5-1219-a61e-f4d2-825574cc52c5" [ 1571.079809] env[62405]: _type = "HttpNfcLease" [ 1571.079809] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1571.090534] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1571.090534] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5223f1a5-1219-a61e-f4d2-825574cc52c5" [ 1571.090534] env[62405]: _type = "HttpNfcLease" [ 1571.090534] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1571.128592] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946956, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074217} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.128892] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1571.129809] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6665ea94-52f1-4a1a-ba58-d8065d495278 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.156645] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1571.160130] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48d59ad8-fe2b-402d-a12e-c256a386f138 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.180962] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1571.180962] env[62405]: value = "task-1946961" [ 1571.180962] env[62405]: _type = "Task" [ 1571.180962] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.189893] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.277454] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.436355] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946959, 'name': CreateVM_Task, 'duration_secs': 0.379118} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.436518] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1571.438290] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'delete_on_termination': True, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'attachment_id': 'd1ad655c-79c9-412c-b6a2-5efcd198f813', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401333', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'name': 'volume-2c06d022-a782-4194-9dee-348bf3888516', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9', 'attached_at': '', 'detached_at': '', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'serial': '2c06d022-a782-4194-9dee-348bf3888516'}, 'volume_type': None}], 'swap': None} {{(pid=62405) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1571.438290] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Root volume attach. Driver type: vmdk {{(pid=62405) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1571.438725] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6604c8-8e03-4932-a365-d1b4a114498d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.450771] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e13520-54ab-4c03-b242-d0f98f1bd6ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.460033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3aac00-fa8d-4509-b1ec-f56001a37de6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.465872] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-80b218ea-3919-49ef-bc0a-003fef0923c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.473320] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1571.473320] env[62405]: value = "task-1946962" [ 1571.473320] env[62405]: _type = "Task" [ 1571.473320] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.485201] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946962, 'name': RelocateVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.516733] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5ff904-4684-4bcc-a10d-dc02c5176041 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.525017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81291c1-d605-4055-836a-6871b95a4363 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.559402] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a486c3-33ee-4e58-9579-402abdbca6ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.568433] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901a30fd-3244-4f02-830e-23d5185778f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.583958] env[62405]: DEBUG nova.compute.provider_tree [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.592016] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1571.592016] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5223f1a5-1219-a61e-f4d2-825574cc52c5" [ 1571.592016] env[62405]: _type = "HttpNfcLease" [ 1571.592016] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1571.592344] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1571.592344] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5223f1a5-1219-a61e-f4d2-825574cc52c5" [ 1571.592344] env[62405]: _type = "HttpNfcLease" [ 1571.592344] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1571.593122] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeccbb4-f9e1-4f56-b0af-c96ff4b76794 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.603293] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk from lease info. 
{{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1571.603577] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1571.692964] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.712969] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-16570548-0bc6-4c17-b3e5-d20c0c68b685 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.825278] env[62405]: DEBUG nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1571.826677] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8ba9b5-08ba-4162-b0f0-958dad726bf0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.988881] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946962, 'name': RelocateVM_Task} progress is 20%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.054740] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updated VIF entry in instance network info cache for port 0974798a-a146-421e-a104-caeb56db51b3. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1572.056313] env[62405]: DEBUG nova.network.neutron [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.083239] env[62405]: DEBUG nova.compute.manager [req-7156c453-ee0e-48cf-a85d-026d98e171fb req-10a1b993-6b6e-4156-84db-69e672f62290 service nova] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Received event network-vif-deleted-d385dca6-fc58-4113-bd50-3886fbe12d53 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1572.092581] env[62405]: DEBUG nova.scheduler.client.report [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1572.193887] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946961, 'name': ReconfigVM_Task, 'duration_secs': 0.682929} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.194259] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Reconfigured VM instance instance-0000001b to attach disk [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.194961] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea4e1bea-a9bf-4329-832e-987228972af7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.202425] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1572.202425] env[62405]: value = "task-1946963" [ 1572.202425] env[62405]: _type = "Task" [ 1572.202425] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.217632] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946963, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.271472] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Received event network-vif-plugged-b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1572.271797] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquiring lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.272743] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.273070] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.273316] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] No waiting events found dispatching 
network-vif-plugged-b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1572.273520] env[62405]: WARNING nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Received unexpected event network-vif-plugged-b5922da6-f3d2-478a-8756-ea7020186366 for instance with vm_state building and task_state spawning. [ 1572.273731] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Received event network-changed-b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1572.280261] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Refreshing instance network info cache due to event network-changed-b5922da6-f3d2-478a-8756-ea7020186366. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1572.280261] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquiring lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.280261] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquired lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.280261] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Refreshing network info cache for port b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1572.339444] env[62405]: INFO nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] instance snapshotting [ 1572.340880] env[62405]: WARNING nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1572.344183] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3bdc45-d7db-49af-9b5f-06a6f4d35372 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.370368] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b43006-b200-42f2-a554-49c3d0946ee3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.488806] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: 
{'id': task-1946962, 'name': RelocateVM_Task, 'duration_secs': 0.613373} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.489198] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1572.489851] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401333', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'name': 'volume-2c06d022-a782-4194-9dee-348bf3888516', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9', 'attached_at': '', 'detached_at': '', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'serial': '2c06d022-a782-4194-9dee-348bf3888516'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1572.490336] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25852c05-94c9-4df2-9c1b-42115d365741 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.516984] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0bee9c-e894-416d-86b8-2a17d19822c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.543165] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] volume-2c06d022-a782-4194-9dee-348bf3888516/volume-2c06d022-a782-4194-9dee-348bf3888516.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1572.543634] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6cf0049-ca5d-4e62-9207-ecfe3a6d9b09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.559172] env[62405]: DEBUG oslo_concurrency.lockutils [req-1ceb959f-e6c2-4011-9955-99114aff4fb3 req-8c2ea707-2fbb-492a-92cf-2a9fd52f43de service nova] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.566290] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1572.566290] env[62405]: value = "task-1946964" [ 1572.566290] env[62405]: _type = "Task" [ 1572.566290] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.576300] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946964, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.596812] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.597385] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1572.601883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.312s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.603530] env[62405]: INFO nova.compute.claims [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.713739] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946963, 'name': Rename_Task, 'duration_secs': 0.189402} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.718019] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1572.718019] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22ba3e19-b4b0-4402-8d33-f4228040e303 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.723297] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1572.723297] env[62405]: value = "task-1946965" [ 1572.723297] env[62405]: _type = "Task" [ 1572.723297] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.731822] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.884494] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1572.885112] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-27740bea-71f4-4184-85de-bac6b2c9a6be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.898147] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1572.898147] env[62405]: value = "task-1946966" [ 1572.898147] env[62405]: _type = "Task" [ 1572.898147] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.908127] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946966, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.077930] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946964, 'name': ReconfigVM_Task, 'duration_secs': 0.405618} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.078351] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Reconfigured VM instance instance-0000001c to attach disk [datastore1] volume-2c06d022-a782-4194-9dee-348bf3888516/volume-2c06d022-a782-4194-9dee-348bf3888516.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1573.083155] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfb90974-fa41-4f13-8bb7-afa06ceeb153 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.098510] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1573.098510] env[62405]: value = "task-1946967" [ 1573.098510] env[62405]: _type = "Task" [ 1573.098510] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.112411] env[62405]: DEBUG nova.compute.utils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1573.113962] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946967, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.116349] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1573.116562] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1573.176101] env[62405]: DEBUG nova.policy [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af192634fc4847c598c198e5e92e4b32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64c9c25765314168a99388fa3472e5c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1573.233803] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946965, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.409812] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946966, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.493356] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Successfully created port: b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1573.524422] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updated VIF entry in instance network info cache for port b5922da6-f3d2-478a-8756-ea7020186366. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1573.525398] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updating instance_info_cache with network_info: [{"id": "b5922da6-f3d2-478a-8756-ea7020186366", "address": "fa:16:3e:2e:48:34", "network": {"id": "8c716b29-2304-47f6-8885-183e7c81bda2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-73064672-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c96e0244edf49db9cd520b5e359fc87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5922da6-f3", "ovs_interfaceid": "b5922da6-f3d2-478a-8756-ea7020186366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.612547] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946967, 'name': ReconfigVM_Task, 'duration_secs': 0.182674} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.616409] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401333', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'name': 'volume-2c06d022-a782-4194-9dee-348bf3888516', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9', 'attached_at': '', 'detached_at': '', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'serial': '2c06d022-a782-4194-9dee-348bf3888516'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1573.616953] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-719016c2-045b-4313-9b45-5773900f0d40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.619854] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1573.635290] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1573.635290] env[62405]: value = "task-1946968" [ 1573.635290] env[62405]: _type = "Task" [ 1573.635290] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.649074] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946968, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.707944] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.708208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.735293] env[62405]: DEBUG oslo_vmware.api [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946965, 'name': PowerOnVM_Task, 'duration_secs': 0.899789} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.735631] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1573.735837] env[62405]: INFO nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Took 9.04 seconds to spawn the instance on the hypervisor. [ 1573.736038] env[62405]: DEBUG nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1573.736954] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf7af6e-b2ad-4081-9ccf-6285beeb5fad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.913638] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946966, 'name': CreateSnapshot_Task, 'duration_secs': 0.801421} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.914249] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1573.915451] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598976f5-1644-4bdf-aba5-597a5eb61fc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.030095] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Releasing lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.030420] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-changed-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1574.030595] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing instance network info cache due to event network-changed-0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1574.031534] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.031534] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.031534] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing network info cache for port 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1574.146512] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946968, 'name': Rename_Task, 'duration_secs': 0.190291} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.146806] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1574.147072] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-642d99e0-9e9c-447e-986e-28c50eab56e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.155111] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1574.155111] env[62405]: value = "task-1946969" [ 1574.155111] env[62405]: _type = "Task" [ 1574.155111] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.165907] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946969, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.267733] env[62405]: INFO nova.compute.manager [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Took 51.78 seconds to build instance. 
[ 1574.335381] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27e8618-e538-415a-b626-a1e7bc82809f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.344073] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c65d2c0-cd7f-4f53-9296-c6cc2c604247 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.379072] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b239e0a8-31b8-48ff-b38f-2bac3325061d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.388294] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8e2399-0278-4a88-bf43-411c44b8c01f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.402951] env[62405]: DEBUG nova.compute.provider_tree [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.438501] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1574.438816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2e7340ed-59ee-458e-a675-211edf18b728 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.448169] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1574.448169] env[62405]: value = "task-1946970" [ 1574.448169] env[62405]: _type = "Task" [ 1574.448169] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.456363] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.632339] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1574.671984] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946969, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.675541] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1574.675957] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.675957] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1574.676207] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.676281] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1574.676526] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1574.676824] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1574.677125] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1574.677410] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1574.677546] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1574.678091] env[62405]: DEBUG nova.virt.hardware [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1574.680153] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46779beb-7eff-47fb-82e2-f4b8c1990d01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.689054] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9bb155-762a-49de-a5bb-a0e2d8fab533 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.715726] env[62405]: DEBUG nova.compute.manager [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1574.716821] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5099d20-e25f-43ee-9062-6a0152f7ee53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.771484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-26196d78-fde2-4777-b695-3ce9b3796c54 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.804s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.910029] env[62405]: DEBUG nova.scheduler.client.report [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1574.959977] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.051421] env[62405]: DEBUG nova.compute.manager [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Received event network-changed-7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1575.051823] env[62405]: DEBUG nova.compute.manager [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Refreshing instance network info cache due to event network-changed-7e786917-4e46-4359-899e-afc1456451ae. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1575.053101] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.053589] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.053906] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Refreshing network info cache for port 7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1575.077072] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updated VIF entry in instance network info cache for port 0974798a-a146-421e-a104-caeb56db51b3. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1575.077072] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.174444] env[62405]: DEBUG oslo_vmware.api [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1946969, 'name': PowerOnVM_Task, 'duration_secs': 0.617678} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.174444] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1575.174444] env[62405]: INFO nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Took 5.91 seconds to spawn the instance on the hypervisor. 
[ 1575.174444] env[62405]: DEBUG nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1575.176363] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47320c2-ecd6-4325-8674-f76126c5207e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.234459] env[62405]: INFO nova.compute.manager [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] instance snapshotting [ 1575.237532] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7071191f-f304-4f2f-ae60-aaf79ad1e102 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.263016] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a9946d-c20b-44da-b580-c598e62cfc8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.273150] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1575.281500] env[62405]: DEBUG nova.compute.manager [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Received event network-vif-plugged-b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1575.281768] env[62405]: DEBUG oslo_concurrency.lockutils [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] Acquiring lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.281983] env[62405]: DEBUG oslo_concurrency.lockutils [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.282192] env[62405]: DEBUG oslo_concurrency.lockutils [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.282376] env[62405]: DEBUG nova.compute.manager [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] [instance: 
14dab775-19b4-4d0d-a7ee-67705f7e45ca] No waiting events found dispatching network-vif-plugged-b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1575.282548] env[62405]: WARNING nova.compute.manager [req-c80b4938-4c14-4cd3-9846-4b82cc6bb6b1 req-61befe10-9b91-4b68-be91-0b820869f7f2 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Received unexpected event network-vif-plugged-b2b20164-38d1-48ac-a12b-c190f4aa9d22 for instance with vm_state building and task_state spawning. [ 1575.332063] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Successfully updated port: b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.412531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.811s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.413058] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1575.416049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.316s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.416282] env[62405]: DEBUG nova.objects.instance [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lazy-loading 'resources' on Instance uuid 7db1b086-942e-4890-8750-0d717e522786 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1575.462200] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.579621] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.579621] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1575.579815] env[62405]: DEBUG nova.compute.manager [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing instance network info cache due to event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1575.579912] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquiring lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.580074] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Acquired lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.580890] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1575.656446] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.656446] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.656662] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.656838] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.657009] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.659663] env[62405]: INFO nova.compute.manager [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Terminating instance [ 1575.700476] env[62405]: INFO nova.compute.manager [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Took 49.00 seconds to build instance. [ 1575.780021] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1575.780021] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0f3f3e0d-6be5-4db3-b445-b71c13c651f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.792988] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1575.792988] env[62405]: value = "task-1946971" [ 1575.792988] env[62405]: _type = "Task" [ 1575.792988] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.805033] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.806092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.813402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.813678] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.834996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.835295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquired lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.835351] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.920785] env[62405]: DEBUG nova.compute.utils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.927167] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1575.927167] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1575.964137] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.005578] env[62405]: DEBUG nova.policy [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1576.162205] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updated VIF entry in instance network info cache for port 7e786917-4e46-4359-899e-afc1456451ae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.162582] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.164100] env[62405]: DEBUG nova.compute.manager [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1576.164502] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1576.165634] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4155fed2-f852-4e22-bad6-ed9920b57160 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.177130] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.180351] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24dc545d-29d8-43b6-af31-493834b66619 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.189305] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1576.189305] env[62405]: value = "task-1946972" [ 1576.189305] env[62405]: _type = "Task" [ 1576.189305] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.202552] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e4d01c4-e5d5-43fc-99ce-5a64916ac21b tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.746s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.203132] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.274263] env[62405]: INFO nova.compute.manager [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Rebuilding instance [ 1576.315174] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.343548] env[62405]: DEBUG nova.compute.manager [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1576.346327] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ba3bde-fa03-4b55-8767-82cd240b9552 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.416401] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1576.425556] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1576.464500] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.520210] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5f776c-da64-49d4-94f0-cfaac97090cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.532457] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f146976c-19e2-4b10-a32e-d746e6294637 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.570123] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3268fdfe-6f52-48b8-950b-bb3eadfc9fc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.576921] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974e0e6a-a549-4bf7-b749-04993b8d946d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.591245] env[62405]: DEBUG nova.compute.provider_tree [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.664965] env[62405]: DEBUG nova.network.neutron [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] 
Updating instance_info_cache with network_info: [{"id": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "address": "fa:16:3e:26:24:2b", "network": {"id": "a2e514c6-e752-472b-8b89-ef96dce4843e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-614451256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64c9c25765314168a99388fa3472e5c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b20164-38", "ovs_interfaceid": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.666892] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.666892] env[62405]: DEBUG nova.compute.manager [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1576.666892] env[62405]: DEBUG nova.compute.manager [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing instance network info cache due to event network-changed-62da0bb8-4a2d-4e69-a4da-3970ca057cad. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1576.666892] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Acquiring lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.703568] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946972, 'name': PowerOffVM_Task, 'duration_secs': 0.216539} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.703876] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1576.704367] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1576.704559] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3cfcc2c-897e-4a5a-a456-965c4511eb14 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.711084] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1576.726684] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updated VIF entry in instance network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.730426] env[62405]: DEBUG nova.network.neutron [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [{"id": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "address": "fa:16:3e:34:fe:9b", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62da0bb8-4a", "ovs_interfaceid": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.775898] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 
tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1576.776714] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1576.776714] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleting the datastore file [datastore1] 65462c7a-372e-4ba6-8f6d-e300080d65d0 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1576.776805] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87ac64bc-fc5b-41fa-b661-73cf7ad64cda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.784025] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1576.784025] env[62405]: value = "task-1946974" [ 1576.784025] env[62405]: _type = "Task" [ 1576.784025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.799262] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.807624] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.829769] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Successfully created port: cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.970306] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946970, 'name': CloneVM_Task, 'duration_secs': 2.465103} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.971729] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Created linked-clone VM from snapshot [ 1576.974553] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9326b0f1-966f-409a-b424-f7ef330f7ef7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.988011] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Uploading image ec0993f0-0095-4523-861c-992c53a631c4 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1577.021852] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1577.021852] env[62405]: value = "vm-401374" [ 1577.021852] env[62405]: _type = "VirtualMachine" [ 1577.021852] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1577.022879] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-723fab6b-ffdd-4feb-b770-a17e2304a0b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.030802] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease: (returnval){ [ 1577.030802] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f921-cd2f-d559-2310-fa2f792ad781" [ 1577.030802] env[62405]: _type = "HttpNfcLease" [ 1577.030802] env[62405]: } obtained for exporting VM: (result){ [ 1577.030802] env[62405]: value = "vm-401374" [ 1577.030802] env[62405]: _type = "VirtualMachine" [ 1577.030802] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1577.031191] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the lease: (returnval){ [ 1577.031191] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f921-cd2f-d559-2310-fa2f792ad781" [ 1577.031191] env[62405]: _type = "HttpNfcLease" [ 1577.031191] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1577.040353] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1577.040353] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f921-cd2f-d559-2310-fa2f792ad781" [ 1577.040353] env[62405]: _type = "HttpNfcLease" [ 1577.040353] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1577.095373] env[62405]: DEBUG nova.scheduler.client.report [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1577.168444] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Releasing lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.169110] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Instance network_info: |[{"id": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "address": "fa:16:3e:26:24:2b", "network": {"id": "a2e514c6-e752-472b-8b89-ef96dce4843e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-614451256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64c9c25765314168a99388fa3472e5c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b20164-38", "ovs_interfaceid": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1577.169250] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:24:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2b20164-38d1-48ac-a12b-c190f4aa9d22', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1577.182119] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Creating folder: Project (64c9c25765314168a99388fa3472e5c9). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1577.182119] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82b85fab-9f31-4bbc-9c40-c3b997a51bc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.195294] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Created folder: Project (64c9c25765314168a99388fa3472e5c9) in parent group-v401284. [ 1577.195544] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Creating folder: Instances. Parent ref: group-v401375. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1577.195750] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bcfa76e-616b-41e5-9d7e-eaaa9ad96f53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.206806] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Created folder: Instances in parent group-v401375. [ 1577.207086] env[62405]: DEBUG oslo.service.loopingcall [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.207299] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1577.207532] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0334422d-78c0-406a-92dd-4ac28e28bfc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.233150] env[62405]: DEBUG oslo_concurrency.lockutils [req-d3e08ffb-0c96-4e6e-8316-d1bd35922bcb req-3522d00b-7c9e-4f62-9a8d-4d7eedff6437 service nova] Releasing lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.233883] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Acquired lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.234037] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Refreshing network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.239083] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1577.239083] env[62405]: value = "task-1946978" [ 1577.239083] env[62405]: _type = "Task" [ 1577.239083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.248564] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946978, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.252238] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.298262] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.316676] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.371829] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1577.372214] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aae10167-0c0d-44dd-afc8-e6f4dcc8f5b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.381726] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1577.381726] env[62405]: value = "task-1946979" [ 1577.381726] env[62405]: _type = "Task" [ 1577.381726] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.393712] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.437529] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1577.471551] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1577.471911] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1577.472249] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1577.472521] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1577.472710] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1577.472882] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1577.473140] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1577.473327] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1577.473526] env[62405]: DEBUG nova.virt.hardware [None 
req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1577.473785] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1577.473978] env[62405]: DEBUG nova.virt.hardware [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1577.474955] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3791df4f-854c-4498-9fa1-dbea6fa4a392 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.483976] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d88827-8903-4c14-bb1e-5d582dbcbb5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.542023] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1577.542023] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f921-cd2f-d559-2310-fa2f792ad781" [ 1577.542023] env[62405]: _type = "HttpNfcLease" [ 1577.542023] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1577.542023] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1577.542023] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f921-cd2f-d559-2310-fa2f792ad781" [ 1577.542023] env[62405]: _type = "HttpNfcLease" [ 1577.542023] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1577.542023] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6baca5f9-1109-4ddc-be2c-baa209138256 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.550949] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1577.551171] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk for reading. 
{{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1577.611152] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.617102] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.071s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.621632] env[62405]: INFO nova.compute.claims [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1577.649239] env[62405]: INFO nova.scheduler.client.report [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Deleted allocations for instance 7db1b086-942e-4890-8750-0d717e522786 [ 1577.663034] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b8046c06-2736-4445-a2c1-de8b999ac7d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.749711] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946978, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.795373] env[62405]: DEBUG oslo_vmware.api [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1946974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.715307} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.795677] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1577.795875] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1577.796118] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1577.796322] env[62405]: INFO nova.compute.manager [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1577.796593] env[62405]: DEBUG oslo.service.loopingcall [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.796804] env[62405]: DEBUG nova.compute.manager [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1577.796994] env[62405]: DEBUG nova.network.neutron [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1577.812485] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.893561] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946979, 'name': PowerOffVM_Task, 'duration_secs': 0.29133} completed successfully.
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.893999] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1577.895206] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1577.895206] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a4ce5f-277d-4b36-8db5-b82ecf965ad9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.905083] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1577.905083] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b43240b-bb23-40e1-a899-b741a6cfeb28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.991026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1577.991026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1577.991026] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1577.991026] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4da3ccc4-e7e0-4b6a-957d-1a75b2c0b435 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.997270] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1577.997270] env[62405]: value = "task-1946981" [ 1577.997270] env[62405]: _type = "Task" [ 1577.997270] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.008098] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.136650] env[62405]: DEBUG nova.compute.manager [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-changed-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1578.136919] env[62405]: DEBUG nova.compute.manager [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing instance network info cache due to event network-changed-0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1578.137851] env[62405]: DEBUG oslo_concurrency.lockutils [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.138016] env[62405]: DEBUG oslo_concurrency.lockutils [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.138222] env[62405]: DEBUG nova.network.neutron [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing network info cache for port 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.153720] env[62405]: DEBUG nova.compute.manager [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Received event network-changed-b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1578.155163] env[62405]: DEBUG nova.compute.manager [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Refreshing instance network info cache due to event network-changed-b2b20164-38d1-48ac-a12b-c190f4aa9d22. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1578.155163] env[62405]: DEBUG oslo_concurrency.lockutils [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] Acquiring lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.155163] env[62405]: DEBUG oslo_concurrency.lockutils [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] Acquired lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.155163] env[62405]: DEBUG nova.network.neutron [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Refreshing network info cache for port b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.158963] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dab938bf-c818-479f-b806-6c3c4b80f244 tempest-ServersV294TestFqdnHostnames-1456737565 tempest-ServersV294TestFqdnHostnames-1456737565-project-member] Lock "7db1b086-942e-4890-8750-0d717e522786" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 36.076s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.252761] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946978, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.300829] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updated VIF entry in instance network info cache for port 62da0bb8-4a2d-4e69-a4da-3970ca057cad.
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.301278] env[62405]: DEBUG nova.network.neutron [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [{"id": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "address": "fa:16:3e:34:fe:9b", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62da0bb8-4a", "ovs_interfaceid": "62da0bb8-4a2d-4e69-a4da-3970ca057cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.318546] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946971, 'name': CreateSnapshot_Task, 'duration_secs': 2.299428} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.318950] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1578.320181] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4044adf-81de-4d14-9c57-431426b07443 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.510048] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1946981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356098} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.510406] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1578.510512] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1578.510686] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1578.609427] env[62405]: DEBUG nova.network.neutron [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.751903] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946978, 'name': CreateVM_Task, 'duration_secs': 1.413684} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.755672] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1578.757285] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.757460] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.757792] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1578.758065] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98474409-e492-4ddd-bf9b-04883f79ae4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.768360] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 
tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1578.768360] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520fed22-dab1-9982-44eb-13a74a42bd6f" [ 1578.768360] env[62405]: _type = "Task" [ 1578.768360] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.778469] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520fed22-dab1-9982-44eb-13a74a42bd6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.811809] env[62405]: DEBUG oslo_concurrency.lockutils [req-5acc32b5-679d-457f-8d92-28e0776fe88a req-05c778b0-f4c3-4199-ae96-b53df555a043 service nova] Releasing lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.845295] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1578.851108] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Successfully updated port: cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1578.852554] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8c1ab6dc-7eb4-4861-b31d-499d28d38469 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.867398] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1578.867398] env[62405]: value = "task-1946982" [ 1578.867398] env[62405]: _type = "Task" [ 1578.867398] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.884515] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946982, 'name': CloneVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.973214] env[62405]: DEBUG nova.network.neutron [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updated VIF entry in instance network info cache for port 0974798a-a146-421e-a104-caeb56db51b3. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.975559] env[62405]: DEBUG nova.network.neutron [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.980613] env[62405]: DEBUG nova.network.neutron [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Updated VIF entry in instance network info cache for port b2b20164-38d1-48ac-a12b-c190f4aa9d22. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.981074] env[62405]: DEBUG nova.network.neutron [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Updating instance_info_cache with network_info: [{"id": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "address": "fa:16:3e:26:24:2b", "network": {"id": "a2e514c6-e752-472b-8b89-ef96dce4843e", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-614451256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64c9c25765314168a99388fa3472e5c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b20164-38", "ovs_interfaceid": "b2b20164-38d1-48ac-a12b-c190f4aa9d22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.112145] env[62405]: INFO nova.compute.manager [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 1.32 seconds to deallocate network for instance. [ 1579.281715] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520fed22-dab1-9982-44eb-13a74a42bd6f, 'name': SearchDatastore_Task, 'duration_secs': 0.022492} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.282077] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.282317] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.282552] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.282697] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.282879] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.283156] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66bdc35c-cbf2-4751-adca-7800acd0d61a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.286734] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a4ef79-d9b0-4995-8d4d-d33a7c0de24f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.294631] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a194ee26-eebc-4d9e-ad00-66aac3d49a08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.299144] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.299323] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1579.300364] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-350fa62c-96ba-4716-afcf-72486d5614ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.328985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e63380-074a-44f3-883f-e51fbc371a91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.333043] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1579.333043] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52164917-6e9e-14cb-51af-feaa8befc2de" [ 1579.333043] env[62405]: _type = "Task" [ 1579.333043] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.340090] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc47a4e-dc66-4131-8c8d-6e7734e1c664 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.347269] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52164917-6e9e-14cb-51af-feaa8befc2de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.357673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.357814] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.357960] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1579.359356] env[62405]: DEBUG nova.compute.provider_tree [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.377520] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946982, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.480143] env[62405]: DEBUG oslo_concurrency.lockutils [req-29cbc3b2-b413-4460-bcea-4178c2905fa4 req-a1534ad5-b699-4c11-a388-486a4ee79e20 service nova] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.487040] env[62405]: DEBUG oslo_concurrency.lockutils [req-0aabe5b9-f781-4282-a36c-5a9a7e706cd5 req-c2b371b9-5a3a-4250-87f3-f4f9a22baf79 service nova] Releasing lock "refresh_cache-14dab775-19b4-4d0d-a7ee-67705f7e45ca" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.622478] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.844825] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52164917-6e9e-14cb-51af-feaa8befc2de, 'name': SearchDatastore_Task, 'duration_secs': 0.014566} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.845683] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5350c57-800f-4cda-ad79-0ef708ca7106 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.851334] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1579.851334] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b23fc-b605-fc0e-c7b8-f6670e1b7bf6" [ 1579.851334] env[62405]: _type = "Task" [ 1579.851334] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.859790] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b23fc-b605-fc0e-c7b8-f6670e1b7bf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.863482] env[62405]: DEBUG nova.scheduler.client.report [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1579.876533] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946982, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.906793] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1580.058834] env[62405]: DEBUG nova.network.neutron [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Updating instance_info_cache with network_info: [{"id": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "address": "fa:16:3e:e8:3d:03", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9dc646-dc", "ovs_interfaceid": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.362062] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b23fc-b605-fc0e-c7b8-f6670e1b7bf6, 'name': SearchDatastore_Task, 'duration_secs': 0.0325} completed successfully. 
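The update_instance_cache_with_nw_info entry above carries the full network_info just built for instance 900b95b5-...: a single OVS port on br-int with MAC fa:16:3e:e8:3d:03 and fixed IP 192.168.128.8 in 192.168.128.0/28 (MTU 8950, gateway .1, DHCP server .2). A short standard-library sketch of pulling the usual fields out of such a structure; the dict literal is abbreviated from the log entry:

import ipaddress

network_info = [{
    "id": "cf9dc646-dc3f-46c3-9291-5f2caa585662",
    "address": "fa:16:3e:e8:3d:03",
    "type": "ovs",
    "devname": "tapcf9dc646-dc",
    "network": {
        "bridge": "br-int",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.8", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    subnet = vif["network"]["subnets"][0]
    cidr = ipaddress.ip_network(subnet["cidr"])
    fixed = [ip["address"] for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed, "mtu", vif["network"]["meta"]["mtu"])
    # A /28 has 16 addresses; after the network, broadcast, gateway (.1) and
    # DHCP server (.2) addresses, 12 remain for instances.
    print("addresses in subnet:", cidr.num_addresses)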
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.362365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.362628] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 14dab775-19b4-4d0d-a7ee-67705f7e45ca/14dab775-19b4-4d0d-a7ee-67705f7e45ca.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1580.362908] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f28675c0-8894-437a-b7bc-589dfecb15b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.370081] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.370614] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1580.374536] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1580.374536] env[62405]: value = "task-1946983" [ 1580.374536] env[62405]: _type = "Task" [ 1580.374536] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.375210] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 28.415s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.393808] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946982, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.398411] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946983, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.562072] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.562564] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Instance network_info: |[{"id": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "address": "fa:16:3e:e8:3d:03", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9dc646-dc", "ovs_interfaceid": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1580.563177] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:3d:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf9dc646-dc3f-46c3-9291-5f2caa585662', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.573231] env[62405]: DEBUG oslo.service.loopingcall [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.573565] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1580.574444] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc8bf3d5-0cd3-4fef-a2fd-6fde45654b99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.599437] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1580.599437] env[62405]: value = "task-1946985" [ 1580.599437] env[62405]: _type = "Task" [ 1580.599437] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.615384] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946985, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.879612] env[62405]: DEBUG nova.compute.utils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1580.881239] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1580.881482] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1580.894579] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1946982, 'name': CloneVM_Task, 'duration_secs': 1.541376} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.895525] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Created linked-clone VM from snapshot [ 1580.896808] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa22b2db-f610-4543-95c7-19351e972127 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.903381] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946983, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.910014] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Uploading image ef4be1c0-1cd8-4e45-9137-1b211391ee06 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1580.959052] env[62405]: DEBUG nova.policy [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4776ce09f9114ddb9bc1e4c03b8e0512', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e41926f72174671982ba0d6c4b0f2d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1581.113402] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946985, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.310734] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Successfully created port: 95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.362238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099a36e0-46a3-45cc-a5a2-a18b747fbee6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.370132] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c003e07-f01b-45d3-96b5-7935565c2323 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.401804] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1581.408418] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d9eb12-e2f6-4021-9106-6d705d07b3c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.416173] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946983, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852073} completed successfully. 
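The nova.policy entry above records a failed authorization: while allocating networking for instance 78b4c6ea-..., the network:attach_external_network check is evaluated against credentials that carry only the reader and member roles (is_admin False), so it fails for this non-admin request; the instance's own tenant port is still created a few entries later. A toy evaluator for that style of role-based rule; the rule table is an assumption for illustration and this is not oslo.policy's actual rule language:

# Assumed, simplified rule table: attaching an external network needs admin.
RULES = {
    "network:attach_external_network": {"admin"},
}

def authorize(action, credentials):
    required = RULES.get(action, set())
    granted = set(credentials.get("roles", []))
    if credentials.get("is_admin"):
        granted.add("admin")
    # Pass if no roles are required, or if any required role was granted.
    return not required or bool(required & granted)

creds = {"is_admin": False, "roles": ["reader", "member"]}
print(authorize("network:attach_external_network", creds))  # False, as in the log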
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.418405] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 14dab775-19b4-4d0d-a7ee-67705f7e45ca/14dab775-19b4-4d0d-a7ee-67705f7e45ca.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1581.418643] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1581.418943] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52485e56-58f0-4343-8453-888be55d5251 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.421788] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee3f4bb-cc2a-4a85-8073-7cc654f7cb39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.436786] env[62405]: DEBUG nova.compute.provider_tree [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.442212] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1581.442212] env[62405]: value = "task-1946986" [ 1581.442212] env[62405]: _type = "Task" [ 1581.442212] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.449534] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946986, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.611108] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946985, 'name': CreateVM_Task, 'duration_secs': 0.571602} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.611333] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1581.612249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.612336] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.612685] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1581.612982] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bd75b20-fbaa-4bba-b29f-a14aa77ac543 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.618187] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1581.618187] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52407d55-5d1a-6c65-9ede-d9f9b14d2b64" [ 1581.618187] env[62405]: _type = "Task" [ 1581.618187] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.626407] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52407d55-5d1a-6c65-9ede-d9f9b14d2b64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.942302] env[62405]: DEBUG nova.scheduler.client.report [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1581.955912] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946986, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112517} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.956412] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1581.958320] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fc6482-e3e9-4ad5-9f06-d750edf784d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.983597] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 14dab775-19b4-4d0d-a7ee-67705f7e45ca/14dab775-19b4-4d0d-a7ee-67705f7e45ca.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1581.984037] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fb38146-6be3-41f2-8c1b-0d12812b060a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.009092] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1582.009092] env[62405]: value = "task-1946987" [ 1582.009092] env[62405]: _type = "Task" [ 1582.009092] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.017935] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946987, 'name': ReconfigVM_Task} progress is 5%. 
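The report-client entries above (and the matching one earlier in this section) show resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 advertising 48 VCPU at a 4.0 allocation ratio, 196590 MB of RAM with 512 MB reserved, and 400 GB of disk. Placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio; a quick check of those figures:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(record):
    # Effective capacity the scheduler can allocate against for one class.
    return (record["total"] - record["reserved"]) * record["allocation_ratio"]

for resource_class, record in inventory.items():
    print(resource_class, capacity(record))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0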
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.132606] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52407d55-5d1a-6c65-9ede-d9f9b14d2b64, 'name': SearchDatastore_Task, 'duration_secs': 0.015819} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.133729] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.133729] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1582.133729] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.133937] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.134238] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1582.134667] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80c154dd-596e-4b47-9c4c-8f4220dfe614 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.147301] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.147618] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.148663] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c814c4d6-ccb1-4d43-bd40-16e1cfb3036f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.155583] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1582.155583] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5295cfae-9292-5214-c6ae-13f8cd3c0515" [ 1582.155583] env[62405]: _type = "Task" [ 1582.155583] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.167583] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5295cfae-9292-5214-c6ae-13f8cd3c0515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.411959] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1582.519582] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946987, 'name': ReconfigVM_Task, 'duration_secs': 0.315112} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.519849] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 14dab775-19b4-4d0d-a7ee-67705f7e45ca/14dab775-19b4-4d0d-a7ee-67705f7e45ca.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1582.520417] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0686612f-348a-4511-8eba-c51771b05e2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.526645] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1582.526645] env[62405]: value = "task-1946988" [ 1582.526645] env[62405]: _type = "Task" [ 1582.526645] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.537060] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946988, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.670221] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5295cfae-9292-5214-c6ae-13f8cd3c0515, 'name': SearchDatastore_Task, 'duration_secs': 0.044485} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.671586] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48841997-2273-48f1-8839-b5485e08f43c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.678815] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1582.678815] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527074c6-9898-b250-9fa2-b6ac379f6234" [ 1582.678815] env[62405]: _type = "Task" [ 1582.678815] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.687298] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527074c6-9898-b250-9fa2-b6ac379f6234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.036978] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946988, 'name': Rename_Task, 'duration_secs': 0.156785} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.037392] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1583.037469] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4147201b-4543-4adc-9cb8-3e2dc91e24ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.044727] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1583.044727] env[62405]: value = "task-1946989" [ 1583.044727] env[62405]: _type = "Task" [ 1583.044727] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.052658] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.190347] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527074c6-9898-b250-9fa2-b6ac379f6234, 'name': SearchDatastore_Task, 'duration_secs': 0.014964} completed successfully. 
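Taken together, the task stream for instance 14dab775-... traces the whole spawn-from-image-cache path: the cached VMDK is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended (ExtendVirtualDisk_Task), the disk is attached to the VM (ReconfigVM_Task), the VM is renamed (Rename_Task), and it is powered on (PowerOnVM_Task). A compressed, runnable outline of that sequence; every helper is a hypothetical stand-in for the corresponding nova.virt.vmwareapi vm_util/volumeops call, and wait() stands in for oslo_vmware's wait_for_task:

def _task(name):
    print("started", name)
    return name

def wait(task):
    print("completed", task)

def spawn_from_cached_image(instance_uuid, image_id, datastore="datastore1"):
    cache = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    root = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    wait(_task(f"CopyVirtualDisk_Task {cache} -> {root}"))
    wait(_task("ExtendVirtualDisk_Task to 1048576 (1 GiB in KiB, for root_gb=1)"))
    wait(_task(f"ReconfigVM_Task: attach {root}"))
    wait(_task("Rename_Task"))
    wait(_task("PowerOnVM_Task"))

spawn_from_cached_image("14dab775-19b4-4d0d-a7ee-67705f7e45ca",
                        "e6bba7a8-c2de-41dc-871a-3859bba5f4f9")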
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.190616] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.190981] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 900b95b5-fe5a-46c1-909a-f81b82ced0ef/900b95b5-fe5a-46c1-909a-f81b82ced0ef.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.191259] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0776386-a687-44a6-af3c-f9b65f97fc77 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.198593] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1583.198593] env[62405]: value = "task-1946990" [ 1583.198593] env[62405]: _type = "Task" [ 1583.198593] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.207791] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.556561] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946989, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.710943] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946990, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.055604] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946989, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.209999] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630087} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.210339] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 900b95b5-fe5a-46c1-909a-f81b82ced0ef/900b95b5-fe5a-46c1-909a-f81b82ced0ef.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.210548] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.210789] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b564835-64c9-4780-8cff-92b53bb277d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.216922] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1584.216922] env[62405]: value = "task-1946991" [ 1584.216922] env[62405]: _type = "Task" [ 1584.216922] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.225254] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.557371] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946989, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.730994] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064281} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.731798] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.732175] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27317124-9f91-46bc-b115-7afa758b8d68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.756534] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 900b95b5-fe5a-46c1-909a-f81b82ced0ef/900b95b5-fe5a-46c1-909a-f81b82ced0ef.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.756871] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-235dce9b-a5cf-4497-a8cc-b65420c17b5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.777277] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1584.777277] env[62405]: value = "task-1946992" [ 1584.777277] env[62405]: _type = "Task" [ 1584.777277] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.786129] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946992, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.841534] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1584.841866] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.842051] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1584.842243] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.842389] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1584.842537] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1584.842834] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1584.843120] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1584.843411] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1584.843694] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1584.844077] env[62405]: DEBUG nova.virt.hardware [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1584.846578] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87aca801-9131-43c0-9dff-4c05d337bbec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.856925] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1584.856925] env[62405]: value = "vm-401379" [ 1584.856925] env[62405]: _type = "VirtualMachine" [ 1584.856925] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1584.858194] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a73db41-df13-49a9-9f00-45cd2025d466 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.864016] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aa37f9dc-d8eb-4a03-9277-663ebdf019b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.877103] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:aa:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce532b3f-30ef-4d32-b533-7a04d491a6d4', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1584.884742] env[62405]: DEBUG oslo.service.loopingcall [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
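The nova.virt.hardware entries above walk the CPU-topology selection for the m1.nano flavor: one vCPU, no flavor or image limits (so the caps default to 65536 sockets/cores/threads), exactly one possible topology, and VirtCPUTopology(cores=1,sockets=1,threads=1) chosen. A simplified illustration of the enumeration step, not Nova's actual _get_possible_cpu_topologies code:

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # All (sockets, cores, threads) triples whose product equals the vCPU
    # count and that respect the flavor/image caps.
    return [
        (s, c, t)
        for s, c, t in product(range(1, vcpus + 1), repeat=3)
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads
    ]

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology reported above
print(possible_topologies(4))  # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...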
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1584.887152] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1584.887413] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1584.887529] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1584.887707] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1584.887878] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1584.888051] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1584.888261] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1584.888419] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1584.888583] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1584.888743] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1584.888919] env[62405]: DEBUG nova.virt.hardware [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1584.890353] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1584.892317] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99150e08-3777-4eca-bc2b-4d7c31f94d2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.894927] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lease: (returnval){ [ 1584.894927] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2c0fc-06ab-2e41-1f52-885fa3fee1b8" [ 1584.894927] env[62405]: _type = "HttpNfcLease" [ 1584.894927] env[62405]: } obtained for exporting VM: (result){ [ 1584.894927] env[62405]: value = "vm-401379" [ 1584.894927] env[62405]: _type = "VirtualMachine" [ 1584.894927] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1584.895265] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the lease: (returnval){ [ 1584.895265] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2c0fc-06ab-2e41-1f52-885fa3fee1b8" [ 1584.895265] env[62405]: _type = "HttpNfcLease" [ 1584.895265] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1584.895543] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a474edc0-fe41-4779-86df-d35a20f38870 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.911886] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1584.915902] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b6e840-37b6-4ead-8483-0e6302cbe896 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.922424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c811e3-0e59-4b10-8cc4-409c189c3953 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.929589] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1584.929589] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2c0fc-06ab-2e41-1f52-885fa3fee1b8" [ 1584.929589] env[62405]: _type = "HttpNfcLease" [ 1584.929589] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1584.929802] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1584.929802] env[62405]: value = "task-1946994" [ 1584.929802] env[62405]: _type = "Task" [ 1584.929802] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.929974] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1584.930153] env[62405]: ERROR oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk due to incomplete transfer. [ 1584.930760] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1584.930760] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2c0fc-06ab-2e41-1f52-885fa3fee1b8" [ 1584.930760] env[62405]: _type = "HttpNfcLease" [ 1584.930760] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1584.931144] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-62596dfc-dacf-4505-91a6-c2ce763d42b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.940483] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8260ab2-d019-470c-a615-97d05c6aceb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.948292] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Successfully updated port: 95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1584.955623] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946994, 'name': CreateVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.955623] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1584.955815] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1584.959332] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 4.584s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.962049] env[62405]: DEBUG oslo_vmware.rw_handles [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52954580-95aa-b91f-7efd-c9faf786f682/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1584.962256] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Uploaded image dc99f2b6-e635-457f-9283-66df59ba98ab to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1584.964575] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1584.966516] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.062s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.966697] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.966851] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1584.967170] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.863s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.968610] env[62405]: INFO nova.compute.claims [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1584.971043] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3c5513d3-2ac3-49ea-80bf-2a5791a44e5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.026886] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9fa3e3-c0e3-480e-9f6d-896d320f4358 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.039545] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1407f9-944d-4b9f-a4a6-3e98eee3c824 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.044096] env[62405]: DEBUG oslo_vmware.api [None 
req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1585.044096] env[62405]: value = "task-1946995" [ 1585.044096] env[62405]: _type = "Task" [ 1585.044096] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.062071] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9345eb7-ef37-4038-9bc2-b4dc648dd9d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.067349] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946995, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.071890] env[62405]: DEBUG oslo_vmware.api [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1946989, 'name': PowerOnVM_Task, 'duration_secs': 1.689399} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.073667] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1585.073907] env[62405]: INFO nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Took 10.44 seconds to spawn the instance on the hypervisor. 
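The recurring "Waiting for the task ... progress is N%" entries around the Destroy_Task, CreateVM_Task and PowerOnVM_Task lines are produced by the driver polling each vCenter task until it reaches a terminal state. The short Python sketch below is a schematic illustration of that poll-until-done pattern only, not the actual oslo_vmware.api implementation; the VimTask class, its states and the poll interval are hypothetical stand-ins introduced here purely for illustration.

# Schematic sketch (assumed, simplified): poll a task object until it is
# 'success' or 'error', logging progress along the way, mirroring the
# "Task: {...} progress is N%" lines in this log.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


class VimTask:
    """Hypothetical stand-in for a vCenter Task managed object."""

    def __init__(self, steps):
        self._steps = iter(steps)  # sequence of (state, progress) tuples

    def read_state(self):
        # In the real driver this read is a PropertyCollector call
        # against the task's info.state / info.progress properties.
        return next(self._steps)


def wait_for_task(task, poll_interval=0.5, log=print):
    """Poll 'task' until it finishes, logging each intermediate progress value."""
    while True:
        state, progress = task.read_state()
        if state == "running":
            log("Task progress is %d%%." % progress)
            time.sleep(poll_interval)
            continue
        if state == "success":
            log("Task completed successfully.")
            return
        raise TaskFailed("Task ended in state %r" % state)


if __name__ == "__main__":
    # Simulated power-on task that reports 0%, then 64%, then succeeds,
    # similar to the PowerOnVM_Task progress entries above.
    fake_task = VimTask([("running", 0), ("running", 64), ("success", 100)])
    wait_for_task(fake_task, poll_interval=0.01)

In the log itself, each poll corresponds to one of the interleaved PropertyCollector.RetrievePropertiesEx invocations, and the final "completed successfully" entry carries the measured 'duration_secs' for the task.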
[ 1585.074144] env[62405]: DEBUG nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1585.075207] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfedb3d5-4efc-4714-9c09-d8df87552c57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.078261] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4835ec-0755-4587-83e3-225bbc435e01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.113768] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179037MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1585.113937] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.114836] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ba40eda9-e230-498b-827a-7d5186b0d66b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.295072] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946992, 'name': ReconfigVM_Task, 'duration_secs': 0.268856} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.295458] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 900b95b5-fe5a-46c1-909a-f81b82ced0ef/900b95b5-fe5a-46c1-909a-f81b82ced0ef.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1585.296186] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efdf2892-81c2-4a7d-91ea-0d357a5767fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.302244] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1585.302244] env[62405]: value = "task-1946996" [ 1585.302244] env[62405]: _type = "Task" [ 1585.302244] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.312021] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946996, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.443178] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1946994, 'name': CreateVM_Task, 'duration_secs': 0.426282} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.443319] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1585.444152] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.444875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.445223] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1585.445580] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41a54330-147c-4997-a9d7-962f8f304943 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.451530] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1585.451530] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528ab4f3-11db-bc96-685b-2966dc1f8f44" [ 1585.451530] env[62405]: _type = "Task" [ 1585.451530] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.462164] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.462623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.463033] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1585.464711] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528ab4f3-11db-bc96-685b-2966dc1f8f44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.526631] env[62405]: INFO nova.scheduler.client.report [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleted allocation for migration 80a37923-23e1-4b60-aaf2-72933f6694b9 [ 1585.558307] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946995, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.623591] env[62405]: INFO nova.compute.manager [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Took 50.17 seconds to build instance. [ 1585.814122] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946996, 'name': Rename_Task, 'duration_secs': 0.132832} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.814641] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1585.815202] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e79a1c8-e00d-457a-8156-ef614bf0fdf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.823056] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1585.823056] env[62405]: value = "task-1946997" [ 1585.823056] env[62405]: _type = "Task" [ 1585.823056] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.830666] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946997, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.907025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "c392d6f3-b638-4857-826d-760c38b7d291" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.907441] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.962982] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528ab4f3-11db-bc96-685b-2966dc1f8f44, 'name': SearchDatastore_Task, 'duration_secs': 0.01533} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.963624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.964394] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.964394] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.964477] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.964667] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.965055] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66bfb74b-14d8-4bdf-9d2f-a2167b1942ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.976156] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.976380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1585.977345] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d09e52-5e73-40da-beac-2a745c76ab35 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.984076] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1585.984076] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52895b16-b124-8bd8-9d05-d03719890d64" [ 1585.984076] env[62405]: _type = "Task" [ 1585.984076] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.992650] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52895b16-b124-8bd8-9d05-d03719890d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.000666] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1586.046427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5294a7b2-6b91-46a0-9297-5e141d4b9642 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 37.713s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.057505] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946995, 'name': Destroy_Task, 'duration_secs': 0.6043} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.057754] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Destroyed the VM [ 1586.057988] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1586.058316] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-12c0e8d3-0642-44c4-9376-ff9bba1d5163 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.065367] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1586.065367] env[62405]: value = "task-1946998" [ 1586.065367] env[62405]: _type = "Task" [ 1586.065367] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.077530] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946998, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.127133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af8f86be-b7d7-474b-80a8-c8ececdd45a2 tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.221s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.169277] env[62405]: DEBUG nova.network.neutron [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Updating instance_info_cache with network_info: [{"id": "95874447-5114-44c9-8785-0134bd6173f2", "address": "fa:16:3e:5e:aa:b9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap95874447-51", "ovs_interfaceid": "95874447-5114-44c9-8785-0134bd6173f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.336280] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946997, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.495515] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52895b16-b124-8bd8-9d05-d03719890d64, 'name': SearchDatastore_Task, 'duration_secs': 0.017394} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.501937] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1656d3f2-d651-48e7-a9fe-2e8dc570afc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.514902] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1586.514902] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52314455-1187-dfbb-20a9-78f236fed929" [ 1586.514902] env[62405]: _type = "Task" [ 1586.514902] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.532707] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52314455-1187-dfbb-20a9-78f236fed929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.535192] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1586.536074] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8062c3c5-7fbd-4118-af64-e48380936ecb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.548023] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk is in state: ready. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1586.548023] env[62405]: ERROR oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk due to incomplete transfer. [ 1586.548023] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-00c09a36-0453-4af2-91c1-f01f374e0c1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.557102] env[62405]: DEBUG oslo_vmware.rw_handles [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ce8ce7-9f55-0035-8ca8-58f363d0db45/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1586.559030] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Uploaded image ec0993f0-0095-4523-861c-992c53a631c4 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1586.559030] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1586.559731] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-49e55cfb-d426-41e4-ad3d-97c1c07c6396 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.569540] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1586.569540] env[62405]: value = "task-1946999" [ 1586.569540] env[62405]: _type = "Task" [ 1586.569540] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.586167] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946998, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.589646] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946999, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.592907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa19b84b-6f5a-45c9-88a6-c58dbe99f23a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.599938] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f2550a-bdc8-49fd-a0c9-eab3a0e74951 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.634428] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1586.638263] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95939e30-0a41-4ac8-8bdc-cc9ab0706d4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.646874] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd9e416-5c36-498c-812b-1094b58e9da8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.663181] env[62405]: DEBUG nova.compute.provider_tree [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.682722] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.683332] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Instance network_info: |[{"id": "95874447-5114-44c9-8785-0134bd6173f2", "address": "fa:16:3e:5e:aa:b9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95874447-51", "ovs_interfaceid": 
"95874447-5114-44c9-8785-0134bd6173f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1586.684111] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:aa:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95874447-5114-44c9-8785-0134bd6173f2', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1586.692183] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Creating folder: Project (3e41926f72174671982ba0d6c4b0f2d7). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1586.692793] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-314c9df9-4897-415a-b67f-05c0ddac9373 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.704060] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Created folder: Project (3e41926f72174671982ba0d6c4b0f2d7) in parent group-v401284. [ 1586.704392] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Creating folder: Instances. Parent ref: group-v401382. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1586.704802] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83cb4c6d-4dc2-42c5-af98-5089cb768a03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.718151] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Created folder: Instances in parent group-v401382. [ 1586.718151] env[62405]: DEBUG oslo.service.loopingcall [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1586.718151] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1586.718151] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f97be68-9575-44a5-8b39-400da27d2b16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.739976] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1586.739976] env[62405]: value = "task-1947002" [ 1586.739976] env[62405]: _type = "Task" [ 1586.739976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.753768] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947002, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.831913] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Received event network-changed-b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1586.832259] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Refreshing instance network info cache due to event network-changed-b5922da6-f3d2-478a-8756-ea7020186366. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1586.832636] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Acquiring lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.832824] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Acquired lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.833170] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Refreshing network info cache for port b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1586.845131] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946997, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.845818] env[62405]: DEBUG nova.compute.manager [req-6577667a-818e-4199-b663-1fe6e0ca6e6d req-618b8d14-2a2e-4947-9e22-596ddba2c751 service nova] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Received event network-vif-deleted-62da0bb8-4a2d-4e69-a4da-3970ca057cad {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1587.036710] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52314455-1187-dfbb-20a9-78f236fed929, 'name': SearchDatastore_Task, 'duration_secs': 0.032006} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.036996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.037262] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1587.037524] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ebaaf79-124e-442f-8a29-701999bde016 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.046151] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1587.046151] env[62405]: value = "task-1947003" [ 1587.046151] env[62405]: _type = "Task" [ 1587.046151] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.055272] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947003, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.086584] env[62405]: DEBUG oslo_vmware.api [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1946998, 'name': RemoveSnapshot_Task, 'duration_secs': 0.99207} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.087631] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1587.087867] env[62405]: INFO nova.compute.manager [None req-180e9cc9-d85e-4c76-b477-828efa2e0af1 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Took 21.22 seconds to snapshot the instance on the hypervisor. [ 1587.096610] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946999, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.168041] env[62405]: DEBUG nova.scheduler.client.report [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1587.176853] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.250716] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947002, 'name': CreateVM_Task, 'duration_secs': 0.404653} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.250939] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1587.251731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.251931] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.252382] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1587.252721] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a80481c1-c745-4494-ac7f-6bc915497933 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.260795] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1587.260795] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ed6b74-c2ff-de83-88dc-ebdfe2dcda80" [ 1587.260795] env[62405]: _type = "Task" [ 1587.260795] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.274836] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ed6b74-c2ff-de83-88dc-ebdfe2dcda80, 'name': SearchDatastore_Task, 'duration_secs': 0.010736} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.275386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.275861] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1587.276331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.276577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.276908] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1587.277348] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19f39a87-ca2e-4d2f-926a-4bb36fd33459 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.287977] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1587.288257] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1587.289304] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b436f837-aa02-48fe-830b-684bcbe35c72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.296406] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1587.296406] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c1f2c-974b-aaf9-e619-a477193ca9bb" [ 1587.296406] env[62405]: _type = "Task" [ 1587.296406] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.312563] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c1f2c-974b-aaf9-e619-a477193ca9bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.336115] env[62405]: DEBUG oslo_vmware.api [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1946997, 'name': PowerOnVM_Task, 'duration_secs': 1.512657} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.340183] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1587.340183] env[62405]: INFO nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Took 9.90 seconds to spawn the instance on the hypervisor. 
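[editor's note] The repeated "Invoking <X>_Task", "Waiting for the task: (returnval){ ... }", and "_poll_task ... progress is N%" entries above (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task) all follow the same oslo.vmware pattern: an asynchronous vSphere call returns a Task managed object, and the session polls it until it completes. A minimal, illustrative sketch of that pattern only; the host, credentials, and vm_ref below are placeholders, not values taken from this log:

    from oslo_vmware import api as vmware_api

    # Establish a vCenter session; api_retry_count/task_poll_interval govern
    # the polling behaviour that produces the "_poll_task ... progress is N%"
    # lines seen in the trace.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # vm_ref: a VirtualMachine managed-object reference obtained elsewhere
    # (assumed here for illustration).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Blocks until oslo.vmware reports the task as "completed successfully",
    # i.e. the transition logged by wait_for_task/_poll_task above.
    session.wait_for_task(task)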
[ 1587.340183] env[62405]: DEBUG nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1587.341244] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758c646f-bc90-45cf-bae7-faeeb3cc4f43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.448336] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.556316] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947003, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.587257] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1946999, 'name': Destroy_Task, 'duration_secs': 0.558317} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.587257] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Destroyed the VM [ 1587.587393] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1587.589297] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fec9d8f1-3ce4-41f0-b04f-d5f573d0d401 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.595061] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1587.595061] env[62405]: value = "task-1947004" [ 1587.595061] env[62405]: _type = "Task" [ 1587.595061] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.598989] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updated VIF entry in instance network info cache for port b5922da6-f3d2-478a-8756-ea7020186366. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1587.599502] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updating instance_info_cache with network_info: [{"id": "b5922da6-f3d2-478a-8756-ea7020186366", "address": "fa:16:3e:2e:48:34", "network": {"id": "8c716b29-2304-47f6-8885-183e7c81bda2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-73064672-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3c96e0244edf49db9cd520b5e359fc87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9643129c-1d95-4422-9df1-2c21289bd5d6", "external-id": "nsx-vlan-transportzone-917", "segmentation_id": 917, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5922da6-f3", "ovs_interfaceid": "b5922da6-f3d2-478a-8756-ea7020186366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.608881] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947004, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.674330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.674599] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1587.678858] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.691s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.680664] env[62405]: INFO nova.compute.claims [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1587.812434] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c1f2c-974b-aaf9-e619-a477193ca9bb, 'name': SearchDatastore_Task, 'duration_secs': 0.094176} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.813434] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7796f014-cbe9-4b44-916d-601d414d5ef4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.821415] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1587.821415] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528fae5d-736f-c8fb-77ca-4d81a73b57ad" [ 1587.821415] env[62405]: _type = "Task" [ 1587.821415] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.834537] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528fae5d-736f-c8fb-77ca-4d81a73b57ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.869460] env[62405]: INFO nova.compute.manager [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Took 45.59 seconds to build instance. [ 1588.057592] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947003, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653218} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.057890] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1588.058595] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1588.058595] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-883c18ff-f00a-4176-8d9d-a5a0e003b89c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.065398] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1588.065398] env[62405]: value = "task-1947005" [ 1588.065398] env[62405]: _type = "Task" [ 1588.065398] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.074769] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.105526] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Releasing lock "refresh_cache-fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.105874] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Received event network-vif-plugged-cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1588.106069] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.106315] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.106470] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.106659] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] No waiting events found dispatching network-vif-plugged-cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1588.106878] env[62405]: WARNING nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Received unexpected event network-vif-plugged-cf9dc646-dc3f-46c3-9291-5f2caa585662 for instance with vm_state building and task_state spawning. [ 1588.107083] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Received event network-changed-cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1588.107266] env[62405]: DEBUG nova.compute.manager [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Refreshing instance network info cache due to event network-changed-cf9dc646-dc3f-46c3-9291-5f2caa585662. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1588.107477] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Acquiring lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.107652] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Acquired lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.107835] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Refreshing network info cache for port cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.109964] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947004, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.189352] env[62405]: DEBUG nova.compute.utils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1588.196156] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1588.196272] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1588.293480] env[62405]: DEBUG nova.policy [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '608e36c6e8064610915af8eeabcff998', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '742aa597f5714acf813be270cf2fae15', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1588.307061] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.307363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.307642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.307995] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.308378] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.311145] env[62405]: INFO nova.compute.manager [None 
req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Terminating instance [ 1588.332555] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528fae5d-736f-c8fb-77ca-4d81a73b57ad, 'name': SearchDatastore_Task, 'duration_secs': 0.032449} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.333666] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.333666] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b/78b4c6ea-6f5b-40d8-8c4a-10332f176e0b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.333666] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afa927b6-430a-4b86-a0cf-15f91723b481 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.340859] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1588.340859] env[62405]: value = "task-1947006" [ 1588.340859] env[62405]: _type = "Task" [ 1588.340859] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.349079] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947006, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.371252] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e5539d29-aed0-4aa1-b234-c408cff9f65b tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.742s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.575220] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069641} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.575586] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.576425] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9a2540-9e8d-462a-942f-9b7ea1ea0f56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.605101] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.605457] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b38f6cc3-b605-41e4-945e-994dbfe86a57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.633515] env[62405]: DEBUG oslo_vmware.api [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947004, 'name': RemoveSnapshot_Task, 'duration_secs': 0.721184} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.634704] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1588.634969] env[62405]: INFO nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Took 16.29 seconds to snapshot the instance on the hypervisor. 
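[editor's note] The lock bookkeeping scattered through this trace comes from oslo.concurrency. Judging by the lockutils.py locations logged, the 'Acquiring lock "..." by "..."' / 'acquired ... waited Ns' / '"released" ... held Ns' triples (e.g. "compute_resources" above) appear to come from the synchronized decorator's inner wrapper, while the plain 'Acquiring/Acquired/Releasing lock "[datastore1] ..."' lines appear to come from the lock() context manager. A minimal sketch of both forms, with placeholder bodies:

    from oslo_concurrency import lockutils

    # Decorator form: logs acquire/waited/released-held around each call.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # placeholder for the work done while holding the lock

    # Context-manager form: logs Acquiring/Acquired/Releasing around the block.
    with lockutils.lock('[datastore1] devstack-image-cache_base'):
        pass  # placeholder for the datastore work guarded by the lock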
[ 1588.641032] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1588.641032] env[62405]: value = "task-1947007" [ 1588.641032] env[62405]: _type = "Task" [ 1588.641032] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.650298] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947007, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.697246] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1588.699244] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.699514] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.699712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.699949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.700193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.706969] env[62405]: INFO nova.compute.manager [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 
tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Terminating instance [ 1588.776167] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Successfully created port: 0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1588.816925] env[62405]: DEBUG nova.compute.manager [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1588.817233] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1588.818157] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490e9a42-733b-401d-ad3f-34fa871ee47d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.832902] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1588.833234] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec895b70-5a0d-4095-b12f-907664abc09e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.840994] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1588.840994] env[62405]: value = "task-1947008" [ 1588.840994] env[62405]: _type = "Task" [ 1588.840994] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.858583] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1947008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.863831] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947006, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.875236] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1589.063289] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Updated VIF entry in instance network info cache for port cf9dc646-dc3f-46c3-9291-5f2caa585662. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.063760] env[62405]: DEBUG nova.network.neutron [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Updating instance_info_cache with network_info: [{"id": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "address": "fa:16:3e:e8:3d:03", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf9dc646-dc", "ovs_interfaceid": "cf9dc646-dc3f-46c3-9291-5f2caa585662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.138927] env[62405]: DEBUG nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance disappeared during snapshot {{(pid=62405) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1589.158745] env[62405]: DEBUG nova.compute.manager [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1589.159263] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947007, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.161053] env[62405]: DEBUG nova.compute.manager [None req-2a532060-fb07-4421-b55c-c37fca490a6c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image not found during clean up ec0993f0-0095-4523-861c-992c53a631c4 {{(pid=62405) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1589.166008] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c0042a-564d-43dc-b5f7-19c69c867290 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.209220] env[62405]: DEBUG nova.compute.manager [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1589.209337] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1589.216265] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe90b617-c31d-48d4-be6a-373bd68c16a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.226976] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1589.228225] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d08aa1e-c4e5-4c69-adaf-6304b1c71412 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.327036] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1589.327036] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1589.327036] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] ca4d11fe-1d0f-468b-a2f4-21c5b84342ab {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1589.327271] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69b47dfe-284d-4d9c-a7e9-9b0bd40fc649 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.336751] env[62405]: DEBUG oslo_vmware.api [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1589.336751] env[62405]: value = "task-1947010" [ 1589.336751] env[62405]: _type = "Task" [ 1589.336751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.356808] env[62405]: DEBUG oslo_vmware.api [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.360620] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1947008, 'name': PowerOffVM_Task, 'duration_secs': 0.319699} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.365216] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1589.365216] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1589.365216] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.879473} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.367255] env[62405]: DEBUG nova.compute.manager [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-changed-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1589.367255] env[62405]: DEBUG nova.compute.manager [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing instance network info cache due to event network-changed-0974798a-a146-421e-a104-caeb56db51b3. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1589.367255] env[62405]: DEBUG oslo_concurrency.lockutils [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] Acquiring lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.367255] env[62405]: DEBUG oslo_concurrency.lockutils [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] Acquired lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.367255] env[62405]: DEBUG nova.network.neutron [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Refreshing network info cache for port 0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.369274] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-526dfa9a-911c-4b91-b5c6-cadcb0add92d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.371911] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b/78b4c6ea-6f5b-40d8-8c4a-10332f176e0b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1589.371911] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1589.372240] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de24f743-7d1a-4b97-9070-648676f329ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.379532] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66fe79eb-87bf-4dd3-ba73-000e3a3e4662 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.390284] env[62405]: DEBUG nova.compute.manager [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Received event network-vif-plugged-95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1589.390499] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Acquiring lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.390708] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.390875] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.391083] env[62405]: DEBUG nova.compute.manager [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] No waiting events found dispatching network-vif-plugged-95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1589.391777] env[62405]: WARNING nova.compute.manager [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Received unexpected event network-vif-plugged-95874447-5114-44c9-8785-0134bd6173f2 for instance with vm_state building and task_state spawning. [ 1589.391777] env[62405]: DEBUG nova.compute.manager [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Received event network-changed-95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1589.391777] env[62405]: DEBUG nova.compute.manager [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Refreshing instance network info cache due to event network-changed-95874447-5114-44c9-8785-0134bd6173f2. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1589.391777] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Acquiring lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.392020] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Acquired lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.392020] env[62405]: DEBUG nova.network.neutron [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Refreshing network info cache for port 95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.396876] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1589.396876] env[62405]: value = "task-1947012" [ 1589.396876] env[62405]: _type = "Task" [ 1589.396876] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.401111] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f96d7c2-ab12-413e-81dd-500978de6ce1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.415601] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.440770] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947012, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.446154] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ceec0e-6567-48b7-baae-acd11d7be392 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.456084] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a5d77e-e95c-4bf0-b114-c20c989eb54a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.471728] env[62405]: DEBUG nova.compute.provider_tree [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.474073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.474448] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.566929] env[62405]: DEBUG oslo_concurrency.lockutils [req-614a07fd-f187-471f-86d1-f430909e5c59 req-77fdeab6-de92-441a-8554-a172b874d891 service nova] Releasing lock "refresh_cache-900b95b5-fe5a-46c1-909a-f81b82ced0ef" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.612925] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1589.613193] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1589.614802] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Deleting the datastore file [datastore1] 14dab775-19b4-4d0d-a7ee-67705f7e45ca {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1589.614802] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6be65f2d-f0ef-446b-8bf6-dc18d28bd1e7 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.621997] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for the task: (returnval){ [ 1589.621997] env[62405]: value = "task-1947013" [ 1589.621997] env[62405]: _type = "Task" [ 1589.621997] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.630759] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1947013, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.659022] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947007, 'name': ReconfigVM_Task, 'duration_secs': 0.64845} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.659022] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Reconfigured VM instance instance-0000001b to attach disk [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce/e8ed73c3-fb86-42c3-aae6-b0c8d03149ce.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.659022] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1d00c83-f6d7-41f0-9211-e6a285cd9c1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.667682] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1589.667682] env[62405]: value = "task-1947014" [ 1589.667682] env[62405]: _type = "Task" [ 1589.667682] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.678939] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947014, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.680114] env[62405]: INFO nova.compute.manager [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] instance snapshotting [ 1589.685505] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89715c5a-2721-4412-9c1b-81ad6995a516 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.705506] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda86f41-0d6e-4697-8fc8-70802ba70986 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.720138] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1589.726175] env[62405]: DEBUG nova.network.neutron [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updated VIF entry in instance network info cache for port 0974798a-a146-421e-a104-caeb56db51b3. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.726175] env[62405]: DEBUG nova.network.neutron [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [{"id": "0974798a-a146-421e-a104-caeb56db51b3", "address": "fa:16:3e:9a:d1:33", "network": {"id": "869979f7-5a22-4c11-bb77-c48a5d5f934f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1534576533-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d1aee7c44f44abc86ed5c15b027e989", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08e9585e-6186-4788-9fd9-24174ce45a6f", "external-id": "nsx-vlan-transportzone-254", "segmentation_id": 254, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0974798a-a1", "ovs_interfaceid": "0974798a-a146-421e-a104-caeb56db51b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.752825] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1589.753107] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.753280] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1589.753468] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.753620] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1589.753815] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1589.754106] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1589.754303] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1589.754509] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1589.754661] env[62405]: DEBUG nova.virt.hardware [None 
req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1589.754858] env[62405]: DEBUG nova.virt.hardware [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1589.755770] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab687e43-4c7b-4583-9ab5-2a06ea39edc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.765392] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8215121b-cceb-4548-8b3d-5bb533263509 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.848330] env[62405]: DEBUG oslo_vmware.api [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410025} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.848665] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1589.848933] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1589.849201] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1589.849434] env[62405]: INFO nova.compute.manager [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1589.850512] env[62405]: DEBUG oslo.service.loopingcall [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1589.850512] env[62405]: DEBUG nova.compute.manager [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1589.850512] env[62405]: DEBUG nova.network.neutron [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1589.910349] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07807} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.913117] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.914017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7819bb62-f76a-4891-9272-9aadbe52e2d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.940812] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b/78b4c6ea-6f5b-40d8-8c4a-10332f176e0b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.940812] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-744e6aa9-4c0f-4d9d-b6a1-7532f1775037 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.965429] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1589.965429] env[62405]: value = "task-1947015" [ 1589.965429] env[62405]: _type = "Task" [ 1589.965429] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.975906] env[62405]: DEBUG nova.scheduler.client.report [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1589.980408] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947015, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.981626] env[62405]: DEBUG nova.compute.utils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1590.021731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59957a81-5297-43d3-a673-024a53a19116" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.022421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.133206] env[62405]: DEBUG oslo_vmware.api [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Task: {'id': task-1947013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275402} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.133554] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1590.133889] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1590.133889] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1590.135451] env[62405]: INFO nova.compute.manager [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1590.135451] env[62405]: DEBUG oslo.service.loopingcall [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1590.135451] env[62405]: DEBUG nova.compute.manager [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1590.135451] env[62405]: DEBUG nova.network.neutron [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1590.178266] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947014, 'name': Rename_Task, 'duration_secs': 0.162296} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.178679] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1590.178780] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b332f136-f6ef-4cfb-adcb-08b34d02321f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.186659] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1590.186659] env[62405]: value = "task-1947016" [ 1590.186659] env[62405]: _type = "Task" [ 1590.186659] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.195661] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947016, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.218459] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1590.218459] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c5082108-d464-4930-85fc-8e6da68e33a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.224459] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1590.224459] env[62405]: value = "task-1947017" [ 1590.224459] env[62405]: _type = "Task" [ 1590.224459] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.232436] env[62405]: DEBUG oslo_concurrency.lockutils [req-6fcd0091-bc5b-4670-8e68-4fbda2b962cf req-5367bddd-06f7-4528-a73d-dbb710abc324 service nova] Releasing lock "refresh_cache-262424b0-dc7d-4b6c-9539-2d6cd23a93da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.240322] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.480807] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947015, 'name': ReconfigVM_Task, 'duration_secs': 0.368305} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.481310] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b/78b4c6ea-6f5b-40d8-8c4a-10332f176e0b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.482114] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6aad4516-7281-4f26-be7a-0cddb2ea8cbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.488222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.810s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.490512] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1590.495185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.018s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.495185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.037s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.495185] env[62405]: INFO nova.compute.claims [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1590.503292] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1590.503292] env[62405]: value = "task-1947018" [ 1590.503292] env[62405]: _type = "Task" [ 1590.503292] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.519632] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947018, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.566490] env[62405]: DEBUG nova.network.neutron [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Updated VIF entry in instance network info cache for port 95874447-5114-44c9-8785-0134bd6173f2. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.566490] env[62405]: DEBUG nova.network.neutron [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Updating instance_info_cache with network_info: [{"id": "95874447-5114-44c9-8785-0134bd6173f2", "address": "fa:16:3e:5e:aa:b9", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95874447-51", "ovs_interfaceid": "95874447-5114-44c9-8785-0134bd6173f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.627666] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Successfully updated port: 0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1590.698629] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947016, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.734521] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.995547] env[62405]: DEBUG nova.compute.utils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1591.001022] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1591.001022] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1591.005387] env[62405]: DEBUG nova.network.neutron [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.019372] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947018, 'name': Rename_Task, 'duration_secs': 0.192558} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.019641] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.019896] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-154c6a5e-1ce4-490a-9dbd-f480b1a26de5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.027855] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1591.027855] env[62405]: value = "task-1947019" [ 1591.027855] env[62405]: _type = "Task" [ 1591.027855] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.040938] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947019, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.068792] env[62405]: DEBUG oslo_concurrency.lockutils [req-3bfc16e4-917c-49e5-abae-7194c1f1f9ea req-bff7f121-8323-4ce3-a6ee-d580748427a4 service nova] Releasing lock "refresh_cache-78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.106844] env[62405]: DEBUG nova.policy [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9de144c120964b4db3259caf5dea43f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cf1f39c8aef41df8c86777f80980664', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1591.129458] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.129577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquired lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.129712] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1591.147848] env[62405]: DEBUG nova.network.neutron [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.200636] env[62405]: DEBUG oslo_vmware.api [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947016, 'name': PowerOnVM_Task, 'duration_secs': 0.559983} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.200948] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.201562] env[62405]: DEBUG nova.compute.manager [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1591.202374] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa5ea4a-4b5c-48be-8f8e-ae0e8283fb2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.236902] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.500974] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1591.509789] env[62405]: INFO nova.compute.manager [-] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Took 1.66 seconds to deallocate network for instance. [ 1591.545600] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947019, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.574499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.574754] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.575018] env[62405]: INFO nova.compute.manager [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Attaching volume 442e8afb-d4f7-4db7-9a25-37612af22952 to /dev/sdb [ 1591.610878] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f7b7e8-b34d-46f1-b067-534bbd27386b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.622339] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09f26ef-8fa9-4d49-b006-893cc713a6b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.632589] env[62405]: DEBUG nova.compute.manager [req-452390c5-b9be-42ac-87ff-05ebb4ed4bc5 req-4466bf65-864c-4ac3-b00b-41ec9d4f9e54 service nova] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Received event network-vif-deleted-677da2fd-a16d-4c43-b074-8aee4d0abe46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1591.632846] env[62405]: DEBUG nova.compute.manager [req-452390c5-b9be-42ac-87ff-05ebb4ed4bc5 req-4466bf65-864c-4ac3-b00b-41ec9d4f9e54 service nova] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Received event network-vif-deleted-b2b20164-38d1-48ac-a12b-c190f4aa9d22 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1591.642190] env[62405]: DEBUG nova.virt.block_device [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Updating existing volume attachment record: edb17db0-6ec1-4b0c-918e-8664c14c9301 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1591.650361] env[62405]: INFO nova.compute.manager [-] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Took 1.52 seconds to deallocate network for instance. 
[ 1591.661858] env[62405]: DEBUG nova.compute.manager [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Received event network-vif-plugged-0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1591.662091] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Acquiring lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.662324] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.662716] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.662904] env[62405]: DEBUG nova.compute.manager [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] No waiting events found dispatching network-vif-plugged-0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1591.663249] env[62405]: WARNING nova.compute.manager [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Received unexpected event network-vif-plugged-0db09e81-ebc5-4f46-bed2-99bdd6a93b15 for instance with vm_state building and task_state spawning. [ 1591.663448] env[62405]: DEBUG nova.compute.manager [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Received event network-changed-0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1591.663902] env[62405]: DEBUG nova.compute.manager [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Refreshing instance network info cache due to event network-changed-0db09e81-ebc5-4f46-bed2-99bdd6a93b15. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1591.664075] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Acquiring lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.678869] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1591.721113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.740365] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.868487] env[62405]: DEBUG nova.network.neutron [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Updating instance_info_cache with network_info: [{"id": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "address": "fa:16:3e:3f:bc:9b", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db09e81-eb", "ovs_interfaceid": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.894427] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Successfully created port: feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1592.019379] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.040138] env[62405]: DEBUG oslo_vmware.api [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947019, 'name': PowerOnVM_Task, 'duration_secs': 0.953389} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.042664] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1592.042965] env[62405]: INFO nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Took 9.63 seconds to spawn the instance on the hypervisor. [ 1592.043194] env[62405]: DEBUG nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1592.044301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1032d7f-d8d2-4c9f-89d2-49ea7d01d2f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.115052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ce0adf-eaa3-4bbd-832b-62207f072ab6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.124979] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29552f21-12a5-44a3-97ee-b4dd2240be90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.158127] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0b91a1-349e-46f2-9cc8-0124c0b466b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.166156] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.167525] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dfd336-4342-4a08-952c-6516ee4eff19 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.187374] env[62405]: DEBUG nova.compute.provider_tree [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.238892] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.375034] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Releasing lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.375480] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Instance network_info: |[{"id": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "address": "fa:16:3e:3f:bc:9b", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db09e81-eb", "ovs_interfaceid": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1592.375801] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Acquired lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.375985] env[62405]: DEBUG nova.network.neutron [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Refreshing network info cache for port 0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1592.377341] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 
tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:bc:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0db09e81-ebc5-4f46-bed2-99bdd6a93b15', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1592.387468] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Creating folder: Project (742aa597f5714acf813be270cf2fae15). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1592.388622] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1db973b1-d72c-4f1b-abae-2074ade169dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.400653] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Created folder: Project (742aa597f5714acf813be270cf2fae15) in parent group-v401284. [ 1592.400847] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Creating folder: Instances. Parent ref: group-v401386. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1592.401081] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-000100e4-d226-4528-85cd-01f906a5d4cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.412868] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Created folder: Instances in parent group-v401386. [ 1592.413025] env[62405]: DEBUG oslo.service.loopingcall [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1592.413212] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1592.413408] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1415309a-084e-4543-9ba9-c95ecf77159d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.434047] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1592.434047] env[62405]: value = "task-1947025" [ 1592.434047] env[62405]: _type = "Task" [ 1592.434047] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.441418] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947025, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.517380] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1592.544206] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1592.544481] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1592.544643] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1592.544827] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1592.545133] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1592.545326] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1592.545539] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 
tempest-SecurityGroupsTestJSON-1573923907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1592.545699] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1592.546037] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1592.546315] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1592.546515] env[62405]: DEBUG nova.virt.hardware [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1592.547374] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6592717c-2feb-4517-80e2-1c0805d02922 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.562766] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb97c51-4c88-4145-8d90-04d2265fa00b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.566719] env[62405]: INFO nova.compute.manager [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Took 42.04 seconds to build instance. 
[ 1592.631245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.631484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.688447] env[62405]: DEBUG nova.scheduler.client.report [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1592.737166] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.950061] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947025, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.073652] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3f352ac-14e1-4a25-81cd-1513b6c07bd5 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.208s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.131173] env[62405]: DEBUG nova.network.neutron [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Updated VIF entry in instance network info cache for port 0db09e81-ebc5-4f46-bed2-99bdd6a93b15. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1593.131615] env[62405]: DEBUG nova.network.neutron [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Updating instance_info_cache with network_info: [{"id": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "address": "fa:16:3e:3f:bc:9b", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.27", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db09e81-eb", "ovs_interfaceid": "0db09e81-ebc5-4f46-bed2-99bdd6a93b15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.194781] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.195051] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1593.199743] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.318s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.202251] env[62405]: INFO nova.compute.claims [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1593.242524] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947017, 'name': CreateSnapshot_Task, 'duration_secs': 2.891276} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.244256] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1593.245451] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2842b79-e5f8-4c01-b9b8-2b59a22188eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.448347] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947025, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.577183] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1593.635154] env[62405]: DEBUG oslo_concurrency.lockutils [req-dd0f0bed-159d-499a-b4c3-6783e02a737a req-c3a44b2b-9e7b-480a-aa54-b264bb030171 service nova] Releasing lock "refresh_cache-9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.639224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.640192] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.640192] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.640436] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.640520] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.642739] env[62405]: INFO nova.compute.manager [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Terminating instance [ 1593.701104] env[62405]: DEBUG nova.compute.utils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1593.702773] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Not allocating networking since 'none' was specified. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1593.769019] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1593.769019] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-99f37b83-c441-4658-8e16-e0517e6b6ff3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.779291] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1593.779291] env[62405]: value = "task-1947026" [ 1593.779291] env[62405]: _type = "Task" [ 1593.779291] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.788128] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947026, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.946253] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947025, 'name': CreateVM_Task, 'duration_secs': 1.031157} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.946253] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1593.947499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.947499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.947499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1593.947742] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae85abae-f22b-4672-b6bd-ec0a04adf905 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.954925] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1593.954925] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228701c-1635-9ba3-229b-77c96b2af525" [ 1593.954925] env[62405]: _type = "Task" [ 1593.954925] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.963419] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228701c-1635-9ba3-229b-77c96b2af525, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.098234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.147239] env[62405]: DEBUG nova.compute.manager [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1594.147485] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1594.148426] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed58a252-dde1-4fa0-84d4-18ee6e138549 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.156684] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.156918] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf9fadbb-0655-4881-8026-fb54a774b774 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.168023] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1594.168023] env[62405]: value = "task-1947028" [ 1594.168023] env[62405]: _type = "Task" [ 1594.168023] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.178666] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947028, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.204161] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1594.292062] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947026, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.402297] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "a9f83357-4898-44ff-a6d8-ea6621453de9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.402297] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.447549] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a6a0e918-425d-44de-a22b-8779e9108533" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.448419] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.467617] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228701c-1635-9ba3-229b-77c96b2af525, 'name': SearchDatastore_Task, 'duration_secs': 0.020781} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.468046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.468452] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.469180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.469180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.469180] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.469396] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1747d9a-fb0a-4ae0-8187-3ec79b02421d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.481099] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.481099] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1594.481888] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d278f016-e608-4279-ba62-16756299c1c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.487150] env[62405]: DEBUG nova.compute.manager [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received event network-vif-plugged-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1594.487425] env[62405]: DEBUG oslo_concurrency.lockutils [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] Acquiring lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.487711] env[62405]: DEBUG oslo_concurrency.lockutils [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.488018] env[62405]: DEBUG oslo_concurrency.lockutils [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.488758] env[62405]: DEBUG nova.compute.manager [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] No waiting events found dispatching network-vif-plugged-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1594.488758] env[62405]: WARNING nova.compute.manager [req-509ce42b-2f33-4aee-b84a-b201ef135a9e req-7f1d72e7-d030-427f-9c3f-4255fbc6b7be service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received unexpected event network-vif-plugged-feb1471c-63ad-4e63-bd9d-e413dee50694 for instance with vm_state building and task_state spawning. [ 1594.493632] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1594.493632] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f655c-e375-bcfc-6769-3fe153edc9db" [ 1594.493632] env[62405]: _type = "Task" [ 1594.493632] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.505704] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f655c-e375-bcfc-6769-3fe153edc9db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.565220] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Successfully updated port: feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1594.676308] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947028, 'name': PowerOffVM_Task, 'duration_secs': 0.305861} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.676592] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1594.676803] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1594.677068] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e659ed1d-a712-496b-86e2-b817b02368e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.755174] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1594.755421] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1594.755661] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] e8ed73c3-fb86-42c3-aae6-b0c8d03149ce {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1594.756782] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-277fec0b-9b29-4759-923c-47ae6a4ac6d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.764193] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1594.764193] env[62405]: value = 
"task-1947030" [ 1594.764193] env[62405]: _type = "Task" [ 1594.764193] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.772057] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.788677] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947026, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.885469] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9a120d-d445-4ea3-a79f-cbdd889cb7f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.897971] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc7c85c-791a-47fa-9779-e2a13567e81b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.932263] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f23fd0-522e-411d-b395-d21a60c24c9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.939077] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10ebacc-e98e-45c2-8ff1-184defa39c35 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.953644] env[62405]: DEBUG nova.compute.provider_tree [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.004490] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f655c-e375-bcfc-6769-3fe153edc9db, 'name': SearchDatastore_Task, 'duration_secs': 0.012936} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.005572] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29f74782-a5b3-4a8e-88d7-217558a864eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.012141] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1595.012141] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526ec24c-f046-af0f-0657-2b809ac24a89" [ 1595.012141] env[62405]: _type = "Task" [ 1595.012141] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.021160] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526ec24c-f046-af0f-0657-2b809ac24a89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.073144] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.073144] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.073144] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1595.217119] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1595.248878] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1595.249210] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1595.249421] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1595.249555] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1595.249705] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1595.249855] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1595.250128] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1595.250276] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1595.250460] env[62405]: DEBUG nova.virt.hardware [None 
req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1595.250636] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1595.250837] env[62405]: DEBUG nova.virt.hardware [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1595.251793] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff01627d-1dad-4b45-b928-bd6a3287760b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.260668] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89251a94-5839-4f01-be34-0bb4527a006e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.274563] env[62405]: DEBUG oslo_vmware.api [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312823} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.282499] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.282729] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1595.283028] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1595.283092] env[62405]: INFO nova.compute.manager [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1595.283333] env[62405]: DEBUG oslo.service.loopingcall [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.283977] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1595.289999] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Creating folder: Project (82cf2aa3f7f84e00a35fb10072803577). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1595.290382] env[62405]: DEBUG nova.compute.manager [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1595.290493] env[62405]: DEBUG nova.network.neutron [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1595.295680] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00fb6e2f-60ee-4084-9e8d-0570c4819dbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.305018] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947026, 'name': CloneVM_Task, 'duration_secs': 1.486735} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.305018] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Created linked-clone VM from snapshot [ 1595.305216] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37902fe-4ff6-4e83-a85c-a8d343a91aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.309833] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Created folder: Project (82cf2aa3f7f84e00a35fb10072803577) in parent group-v401284. [ 1595.310043] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Creating folder: Instances. Parent ref: group-v401392. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1595.310635] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5be083d9-12af-4ad9-a4c3-a1ee6b527260 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.315557] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Uploading image e4cce3e1-804f-4642-8e51-dd15f688fec0 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1595.328419] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Created folder: Instances in parent group-v401392. [ 1595.328683] env[62405]: DEBUG oslo.service.loopingcall [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.328879] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1595.329125] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8e82f15-afe6-413c-a3da-310939f66ef6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.350789] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1595.350789] env[62405]: value = "task-1947033" [ 1595.350789] env[62405]: _type = "Task" [ 1595.350789] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.352684] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1595.352684] env[62405]: value = "vm-401391" [ 1595.352684] env[62405]: _type = "VirtualMachine" [ 1595.352684] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1595.352962] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ce73ae69-4500-4f34-a229-539e5d916f8e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.363575] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947033, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.364911] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease: (returnval){ [ 1595.364911] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc467-3187-c509-806b-4a4f35d34285" [ 1595.364911] env[62405]: _type = "HttpNfcLease" [ 1595.364911] env[62405]: } obtained for exporting VM: (result){ [ 1595.364911] env[62405]: value = "vm-401391" [ 1595.364911] env[62405]: _type = "VirtualMachine" [ 1595.364911] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1595.365181] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the lease: (returnval){ [ 1595.365181] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc467-3187-c509-806b-4a4f35d34285" [ 1595.365181] env[62405]: _type = "HttpNfcLease" [ 1595.365181] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1595.370877] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1595.370877] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc467-3187-c509-806b-4a4f35d34285" [ 1595.370877] env[62405]: _type = "HttpNfcLease" [ 1595.370877] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1595.457177] env[62405]: DEBUG nova.scheduler.client.report [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1595.522161] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526ec24c-f046-af0f-0657-2b809ac24a89, 'name': SearchDatastore_Task, 'duration_secs': 0.017042} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.522498] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.522759] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b/9e73e2ab-1eac-4aca-905f-a8391d3f5a9b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1595.523036] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfd09e86-2a8b-4cc5-8e7e-7ca25d2a2fae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.530155] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1595.530155] env[62405]: value = "task-1947035" [ 1595.530155] env[62405]: _type = "Task" [ 1595.530155] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.539058] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.612090] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1595.721732] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "6213702e-8e39-4342-b62f-2c9495017bf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.722206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.789053] env[62405]: DEBUG nova.network.neutron [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.864219] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947033, 'name': CreateVM_Task, 'duration_secs': 0.310991} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.864404] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1595.864921] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.865109] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.865504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1595.865839] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f019901-0473-4dae-95e4-5f8a17434887 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.875050] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1595.875050] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523ae4ce-55c6-117c-7946-519b8134af03" [ 1595.875050] env[62405]: _type = "Task" [ 1595.875050] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.876953] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1595.876953] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc467-3187-c509-806b-4a4f35d34285" [ 1595.876953] env[62405]: _type = "HttpNfcLease" [ 1595.876953] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1595.880345] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1595.880345] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc467-3187-c509-806b-4a4f35d34285" [ 1595.880345] env[62405]: _type = "HttpNfcLease" [ 1595.880345] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1595.881159] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a3e1c5-3dc9-4649-9186-e81cc418f191 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.890062] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523ae4ce-55c6-117c-7946-519b8134af03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.893433] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1595.893562] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1595.963394] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.964115] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1595.967326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.730s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.967581] env[62405]: DEBUG nova.objects.instance [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lazy-loading 'resources' on Instance uuid ca0aca02-4b99-4393-900c-b9cb0dad55c7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1596.044771] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947035, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.129032] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6861cb87-51ed-414c-97e9-813e4b8fe97f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.154889] env[62405]: DEBUG nova.network.neutron [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.189429] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1596.189642] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401389', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'name': 'volume-442e8afb-d4f7-4db7-9a25-37612af22952', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '900b95b5-fe5a-46c1-909a-f81b82ced0ef', 'attached_at': '', 'detached_at': '', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'serial': '442e8afb-d4f7-4db7-9a25-37612af22952'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1596.191867] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ab1afb-9d11-4f91-ab69-1315843cdb6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.210668] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b3282d-9a63-4134-a23b-55704ab6d568 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.237304] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] volume-442e8afb-d4f7-4db7-9a25-37612af22952/volume-442e8afb-d4f7-4db7-9a25-37612af22952.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1596.237650] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-774c7802-c7b4-483c-b1ef-d625c79fe1e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.257612] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1596.257612] env[62405]: value = "task-1947036" [ 1596.257612] env[62405]: _type = "Task" [ 1596.257612] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.266328] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947036, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.292418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.292813] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Instance network_info: |[{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1596.293355] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:75:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'feb1471c-63ad-4e63-bd9d-e413dee50694', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1596.302635] env[62405]: DEBUG oslo.service.loopingcall [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.302950] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1596.303147] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4381806-687e-444e-a6e4-9723b6a0c9fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.326735] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1596.326735] env[62405]: value = "task-1947037" [ 1596.326735] env[62405]: _type = "Task" [ 1596.326735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.334626] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947037, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.386865] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523ae4ce-55c6-117c-7946-519b8134af03, 'name': SearchDatastore_Task, 'duration_secs': 0.050553} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.387229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.387927] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1596.387927] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.387927] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.388129] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1596.388351] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17ffbba4-0b23-4774-9504-470276394b32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.398085] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1596.398234] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1596.398957] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff3e4d1d-2bf4-4fce-9ee3-b642f8a82539 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.404309] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1596.404309] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5268b341-1350-2f70-1cfc-9f525f9fc9b3" [ 1596.404309] env[62405]: _type = "Task" [ 1596.404309] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.412445] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5268b341-1350-2f70-1cfc-9f525f9fc9b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.474615] env[62405]: DEBUG nova.compute.utils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.475764] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1596.476033] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1596.522973] env[62405]: DEBUG nova.policy [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1596.541029] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947035, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.590235] env[62405]: DEBUG nova.compute.manager [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1596.590390] env[62405]: DEBUG nova.compute.manager [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing instance network info cache due to event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1596.590817] env[62405]: DEBUG oslo_concurrency.lockutils [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] Acquiring lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.590817] env[62405]: DEBUG oslo_concurrency.lockutils [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] Acquired lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.591214] env[62405]: DEBUG nova.network.neutron [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1596.662534] env[62405]: INFO nova.compute.manager [-] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Took 1.37 seconds to deallocate network for instance. 
[ 1596.776774] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.841195] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947037, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.875211] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Successfully created port: 666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1596.922637] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5268b341-1350-2f70-1cfc-9f525f9fc9b3, 'name': SearchDatastore_Task, 'duration_secs': 0.01527} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.925786] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1596.927628] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4610ebd4-686e-447c-9db4-51d0c67c5f33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.937318] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1024a58-9d7c-434b-86e7-b06c38827b88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.942211] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1596.942211] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dfefd-e641-b4ae-69a3-d8463c65d69e" [ 1596.942211] env[62405]: _type = "Task" [ 1596.942211] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.947038] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk is in state: ready. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1596.947038] env[62405]: ERROR oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk due to incomplete transfer. [ 1596.947802] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-64343edc-d0a1-4fef-8c15-c3c241ec6b44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.958983] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dfefd-e641-b4ae-69a3-d8463c65d69e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.965511] env[62405]: DEBUG oslo_vmware.rw_handles [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5297c66f-6669-beff-3eb4-109224c59911/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1596.966025] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Uploaded image ef4be1c0-1cd8-4e45-9137-1b211391ee06 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1596.970114] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1596.970114] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cc0c579b-05b8-4ba3-8d5d-c71d1a4df6ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.975765] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1596.975765] env[62405]: value = "task-1947038" [ 1596.975765] env[62405]: _type = "Task" [ 1596.975765] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.979123] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1596.991599] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947038, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.044493] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947035, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.174343] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.176956] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5f395c-71cc-4e8f-b494-314469d6c997 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.185321] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdc04c9-67d7-49e5-be93-54d42a948704 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.220147] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95909a0c-875f-4ad2-8157-2fb903122665 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.228292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2882380-defd-48cb-8562-90299cb8cd3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.244134] env[62405]: DEBUG nova.compute.provider_tree [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1597.272803] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.341788] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947037, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.447757] env[62405]: DEBUG nova.network.neutron [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updated VIF entry in instance network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1597.448275] env[62405]: DEBUG nova.network.neutron [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.456414] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dfefd-e641-b4ae-69a3-d8463c65d69e, 'name': SearchDatastore_Task, 'duration_secs': 0.021479} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.456873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.457597] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a73579d1-8647-49fe-98ce-0baffd1a558f/a73579d1-8647-49fe-98ce-0baffd1a558f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1597.458045] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32407405-d324-48c4-8dff-3a4ef73dc1c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.466816] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1597.466816] env[62405]: value = "task-1947039" [ 1597.466816] env[62405]: _type = "Task" [ 1597.466816] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.475976] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947039, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.492707] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947038, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.545136] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947035, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.61433} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.545546] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b/9e73e2ab-1eac-4aca-905f-a8391d3f5a9b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1597.545780] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1597.546140] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6475061a-c6de-481d-acaa-b205aa3aedc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.552295] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1597.552295] env[62405]: value = "task-1947040" [ 1597.552295] env[62405]: _type = "Task" [ 1597.552295] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.561744] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.747578] env[62405]: DEBUG nova.scheduler.client.report [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1597.774740] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.839534] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947037, 'name': CreateVM_Task, 'duration_secs': 1.337422} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.839944] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1597.840833] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.841177] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.841729] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1597.842598] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb94bd4-117b-44e8-b355-f4489dc8e80f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.847297] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1597.847297] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5296bc43-3688-4de0-23e5-a992b5f41a5f" [ 1597.847297] env[62405]: _type = "Task" [ 1597.847297] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.856129] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5296bc43-3688-4de0-23e5-a992b5f41a5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.951962] env[62405]: DEBUG oslo_concurrency.lockutils [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] Releasing lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.952398] env[62405]: DEBUG nova.compute.manager [req-d7113076-10c5-43b8-bc25-e64d57b89744 req-67658d90-2470-4207-b583-ce9c413a4935 service nova] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Received event network-vif-deleted-ce532b3f-30ef-4d32-b533-7a04d491a6d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1597.978107] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947039, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.991016] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947038, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.995062] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1598.020081] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1598.020434] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1598.020621] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1598.020945] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1598.021126] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1598.021279] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1598.021498] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1598.021661] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1598.021841] env[62405]: DEBUG 
nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1598.022050] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1598.022240] env[62405]: DEBUG nova.virt.hardware [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1598.023175] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895eab7f-974f-4754-bd84-7daff4e416ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.032785] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618fedba-0ea5-4e73-9a34-1f386d9f6da9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.063043] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127124} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.063410] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1598.064391] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e850ad88-71b7-4fd2-b476-a9b3a5682336 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.090165] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b/9e73e2ab-1eac-4aca-905f-a8391d3f5a9b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.090733] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d3954b2-7f5e-4231-846a-649b89205140 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.113681] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1598.113681] env[62405]: value = "task-1947041" [ 1598.113681] env[62405]: _type = "Task" [ 1598.113681] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.126231] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947041, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.256013] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.258603] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.312s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.259384] env[62405]: DEBUG nova.objects.instance [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lazy-loading 'resources' on Instance uuid a1d35009-ea11-4e64-bbe4-604ed39d08f4 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1598.274834] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947036, 'name': ReconfigVM_Task, 'duration_secs': 1.96009} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.275220] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfigured VM instance instance-0000001e to attach disk [datastore1] volume-442e8afb-d4f7-4db7-9a25-37612af22952/volume-442e8afb-d4f7-4db7-9a25-37612af22952.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.280334] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ad3cb8d-6da0-402c-9f8d-8aa7436dec34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.291246] env[62405]: INFO nova.scheduler.client.report [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted allocations for instance ca0aca02-4b99-4393-900c-b9cb0dad55c7 [ 1598.297656] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1598.297656] env[62405]: value = "task-1947042" [ 1598.297656] env[62405]: _type = "Task" [ 1598.297656] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.308057] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947042, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.358552] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5296bc43-3688-4de0-23e5-a992b5f41a5f, 'name': SearchDatastore_Task, 'duration_secs': 0.066204} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.358927] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.359114] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1598.359346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.359504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.360097] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1598.360097] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d623a341-b03a-468e-9476-56d4c8a0abaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.368053] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1598.368240] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1598.368967] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba5be94-d557-40f1-83fc-4395d66f0c64 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.374379] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1598.374379] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f206b-82ce-8831-a557-3feddd087a98" [ 1598.374379] env[62405]: _type = "Task" [ 1598.374379] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.382960] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f206b-82ce-8831-a557-3feddd087a98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.478300] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744755} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.478572] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a73579d1-8647-49fe-98ce-0baffd1a558f/a73579d1-8647-49fe-98ce-0baffd1a558f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1598.478778] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1598.479058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-250dc1d6-2ff1-496c-90bd-c859bc6267e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.488981] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947038, 'name': Destroy_Task, 'duration_secs': 1.143715} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.490415] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Destroyed the VM [ 1598.490704] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1598.491016] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1598.491016] env[62405]: value = "task-1947043" [ 1598.491016] env[62405]: _type = "Task" [ 1598.491016] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.491219] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4977a388-8a18-4e0a-95d8-826deb2b61be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.503683] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947043, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.505479] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1598.505479] env[62405]: value = "task-1947044" [ 1598.505479] env[62405]: _type = "Task" [ 1598.505479] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.514949] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947044, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.626390] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947041, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.646075] env[62405]: DEBUG nova.compute.manager [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-vif-plugged-666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1598.646449] env[62405]: DEBUG oslo_concurrency.lockutils [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.646687] env[62405]: DEBUG oslo_concurrency.lockutils [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.646877] env[62405]: DEBUG oslo_concurrency.lockutils [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.647274] env[62405]: DEBUG nova.compute.manager [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] No waiting events found dispatching network-vif-plugged-666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1598.647478] env[62405]: WARNING nova.compute.manager [req-d102668a-61f0-4ee3-90e7-6c93abfbaae7 req-e4e97c54-5c84-4ef2-ae8a-6db876b5caaf service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received unexpected event network-vif-plugged-666e898c-754c-4b07-b0d9-dac2a9a5bc6d for instance with vm_state building and task_state spawning. 
[ 1598.677200] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Successfully updated port: 666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1598.804108] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8aeb1831-33a5-4b84-9f03-3c806576c207 tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "ca0aca02-4b99-4393-900c-b9cb0dad55c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.057s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.813932] env[62405]: DEBUG oslo_vmware.api [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947042, 'name': ReconfigVM_Task, 'duration_secs': 0.145947} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.814315] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401389', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'name': 'volume-442e8afb-d4f7-4db7-9a25-37612af22952', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '900b95b5-fe5a-46c1-909a-f81b82ced0ef', 'attached_at': '', 'detached_at': '', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'serial': '442e8afb-d4f7-4db7-9a25-37612af22952'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1598.884313] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f206b-82ce-8831-a557-3feddd087a98, 'name': SearchDatastore_Task, 'duration_secs': 0.035434} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.887630] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c613e0f5-3230-41a4-ac91-f192d40d069e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.892877] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1598.892877] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6ee85-3d3d-cc59-f277-6d16f76cea7b" [ 1598.892877] env[62405]: _type = "Task" [ 1598.892877] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.903327] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6ee85-3d3d-cc59-f277-6d16f76cea7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.003756] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062076} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.004105] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1599.005021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6729b5ba-cef9-46c4-a2b3-914799095812 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.024320] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947044, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.033575] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] a73579d1-8647-49fe-98ce-0baffd1a558f/a73579d1-8647-49fe-98ce-0baffd1a558f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1599.036630] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a40a7ff-6536-4530-be15-7207bac528f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.057437] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1599.057437] env[62405]: value = "task-1947045" [ 1599.057437] env[62405]: _type = "Task" [ 1599.057437] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.067752] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947045, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.129051] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947041, 'name': ReconfigVM_Task, 'duration_secs': 0.515551} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.129439] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b/9e73e2ab-1eac-4aca-905f-a8391d3f5a9b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1599.131164] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2325ae4-4b79-43a3-9b2d-9cf849c06a80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.137601] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1599.137601] env[62405]: value = "task-1947046" [ 1599.137601] env[62405]: _type = "Task" [ 1599.137601] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.146735] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947046, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.181199] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.181329] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.181484] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1599.310280] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae683b8a-304c-4ee8-a65e-27027bead090 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.321291] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fb17b-9f07-4018-a31d-9fda25b8cb78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.354953] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1221afe9-1295-49f3-bdcd-0661c3de71e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.363700] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9462a299-81bb-4e47-bd8d-861ea11c3f69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.377987] env[62405]: DEBUG nova.compute.provider_tree [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1599.403283] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6ee85-3d3d-cc59-f277-6d16f76cea7b, 'name': SearchDatastore_Task, 'duration_secs': 0.027891} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.403550] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.403796] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0feaeb5d-9f4a-4166-99b1-f213bc4fa458/0feaeb5d-9f4a-4166-99b1-f213bc4fa458.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1599.404098] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a228bfe7-46ea-4e2f-8c35-d88a038a3042 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.410193] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1599.410193] env[62405]: value = "task-1947047" [ 1599.410193] env[62405]: _type = "Task" [ 1599.410193] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.417728] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.517752] env[62405]: DEBUG oslo_vmware.api [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947044, 'name': RemoveSnapshot_Task, 'duration_secs': 0.841486} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.518092] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1599.518380] env[62405]: INFO nova.compute.manager [None req-0885717b-13bf-4773-b81e-724d074ce58c tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 24.28 seconds to snapshot the instance on the hypervisor. 
[ 1599.567764] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947045, 'name': ReconfigVM_Task, 'duration_secs': 0.337432} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.568146] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Reconfigured VM instance instance-00000022 to attach disk [datastore1] a73579d1-8647-49fe-98ce-0baffd1a558f/a73579d1-8647-49fe-98ce-0baffd1a558f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1599.568812] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4a94c82-0454-4f41-8a74-a4743445c93c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.575295] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1599.575295] env[62405]: value = "task-1947048" [ 1599.575295] env[62405]: _type = "Task" [ 1599.575295] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.585278] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947048, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.649104] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947046, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.741635] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1599.864132] env[62405]: DEBUG nova.objects.instance [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'flavor' on Instance uuid 900b95b5-fe5a-46c1-909a-f81b82ced0ef {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.881261] env[62405]: DEBUG nova.scheduler.client.report [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1599.920884] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.926135] env[62405]: DEBUG nova.network.neutron [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.086392] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947048, 'name': Rename_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.148773] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947046, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.321755] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.369347] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2e2f12fd-1e79-4ee3-9664-fe5a7c93d33f tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.794s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.370275] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.049s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.370482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.371623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.371623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.372498] env[62405]: INFO nova.compute.manager [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Terminating instance [ 1600.388690] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.391398] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.114s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.393222] env[62405]: INFO nova.compute.claims [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1600.414983] env[62405]: INFO nova.scheduler.client.report [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Deleted allocations for instance a1d35009-ea11-4e64-bbe4-604ed39d08f4 [ 1600.428215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.428519] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Instance network_info: |[{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1600.428800] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': 
task-1947047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.429173] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:60:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '666e898c-754c-4b07-b0d9-dac2a9a5bc6d', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1600.436601] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating folder: Project (ba9083cddcc24345b6ea5d2cbbbec5ba). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1600.437462] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be175abc-6dc1-49a4-ba43-860ac1037d8e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.448333] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created folder: Project (ba9083cddcc24345b6ea5d2cbbbec5ba) in parent group-v401284. [ 1600.448560] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating folder: Instances. Parent ref: group-v401396. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1600.448794] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7529e1bb-d978-49db-8da3-869fb1cfb599 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.458449] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created folder: Instances in parent group-v401396. [ 1600.458757] env[62405]: DEBUG oslo.service.loopingcall [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.459077] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1600.459372] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00eac050-a51c-4446-a41f-1b1cc0852dec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.482964] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1600.482964] env[62405]: value = "task-1947051" [ 1600.482964] env[62405]: _type = "Task" [ 1600.482964] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.493656] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947051, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.588385] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947048, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.656736] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947046, 'name': Rename_Task, 'duration_secs': 1.187583} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.656736] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1600.656736] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45291590-1f0a-4add-8f40-b883e52413a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.665933] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1600.665933] env[62405]: value = "task-1947052" [ 1600.665933] env[62405]: _type = "Task" [ 1600.665933] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.679418] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947052, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.715340] env[62405]: DEBUG nova.compute.manager [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-changed-666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1600.715706] env[62405]: DEBUG nova.compute.manager [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing instance network info cache due to event network-changed-666e898c-754c-4b07-b0d9-dac2a9a5bc6d. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1600.716095] env[62405]: DEBUG oslo_concurrency.lockutils [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.716450] env[62405]: DEBUG oslo_concurrency.lockutils [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.716752] env[62405]: DEBUG nova.network.neutron [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing network info cache for port 666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.876833] env[62405]: DEBUG nova.compute.manager [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1600.877281] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.877472] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e99e4e21-e8b1-4c27-a1b3-e9b7322b6549 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.884965] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1600.884965] env[62405]: value = "task-1947053" [ 1600.884965] env[62405]: _type = "Task" [ 1600.884965] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.894261] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.925887] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947047, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.406742} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.926404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-658dcdfc-19ab-4f5d-8a1b-f6e6dfce33ff tempest-ListServersNegativeTestJSON-224919451 tempest-ListServersNegativeTestJSON-224919451-project-member] Lock "a1d35009-ea11-4e64-bbe4-604ed39d08f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.500s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.927300] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0feaeb5d-9f4a-4166-99b1-f213bc4fa458/0feaeb5d-9f4a-4166-99b1-f213bc4fa458.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1600.928243] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1600.929153] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-489000b0-2ee8-4b54-a1c7-96498f88c147 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.940722] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1600.940722] env[62405]: value = "task-1947054" [ 1600.940722] env[62405]: _type = "Task" [ 1600.940722] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.952569] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.992964] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947051, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.017724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "fbedaa93-5968-4b42-b93e-201d2b44b32b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.017724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.017724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.017856] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.017927] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.020214] env[62405]: INFO nova.compute.manager [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Terminating instance [ 1601.086474] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947048, 'name': Rename_Task, 'duration_secs': 1.124118} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.086746] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1601.087012] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52a458b1-beea-4bce-bf6a-d9d402ac963b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.093273] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1601.093273] env[62405]: value = "task-1947055" [ 1601.093273] env[62405]: _type = "Task" [ 1601.093273] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.101044] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947055, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.184780] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947052, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.398160] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947053, 'name': PowerOffVM_Task, 'duration_secs': 0.505394} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.398463] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1601.399507] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1601.399507] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401389', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'name': 'volume-442e8afb-d4f7-4db7-9a25-37612af22952', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '900b95b5-fe5a-46c1-909a-f81b82ced0ef', 'attached_at': '', 'detached_at': '', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'serial': '442e8afb-d4f7-4db7-9a25-37612af22952'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1601.400111] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c396227-2b1e-49e2-b5ca-cfebb902c15c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.432381] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5490205b-852d-434b-90e8-fe2b00c77c21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.449131] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3525e606-7fea-4a3b-b919-5bdc3a9c03af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.459385] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.344903} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.460226] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1601.463044] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36934a0-b702-4e91-b13f-a981c26f23d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.489230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b824a966-0e5f-4c1a-a574-3845a24e7ed6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.520383] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 0feaeb5d-9f4a-4166-99b1-f213bc4fa458/0feaeb5d-9f4a-4166-99b1-f213bc4fa458.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1601.524302] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c61bb7de-e979-4de0-857c-3ab4b2bff161 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.539285] env[62405]: DEBUG nova.compute.manager [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1601.539501] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1601.553795] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6259b0ea-6edc-4d32-9ae8-ec26537a33f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.557869] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] The volume has not been displaced from its original location: [datastore1] volume-442e8afb-d4f7-4db7-9a25-37612af22952/volume-442e8afb-d4f7-4db7-9a25-37612af22952.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1601.565565] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfiguring VM instance instance-0000001e to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1601.572279] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-827840a5-f2b9-436d-964b-e6ac8460860c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.585534] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947051, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.590376] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1601.590376] env[62405]: value = "task-1947056" [ 1601.590376] env[62405]: _type = "Task" [ 1601.590376] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.597111] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1601.597382] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1601.597382] env[62405]: value = "task-1947057" [ 1601.597382] env[62405]: _type = "Task" [ 1601.597382] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.601077] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-592f95ff-89db-4842-be38-52a7f356bb9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.612220] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.616019] env[62405]: DEBUG oslo_vmware.api [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947055, 'name': PowerOnVM_Task, 'duration_secs': 0.508735} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.620728] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.620976] env[62405]: INFO nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Took 6.40 seconds to spawn the instance on the hypervisor. [ 1601.621193] env[62405]: DEBUG nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1601.621504] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947057, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.621794] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1601.621794] env[62405]: value = "task-1947058" [ 1601.621794] env[62405]: _type = "Task" [ 1601.621794] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.625166] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1714e8-308e-4417-a19c-252075a204bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.640684] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.668966] env[62405]: DEBUG nova.network.neutron [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updated VIF entry in instance network info cache for port 666e898c-754c-4b07-b0d9-dac2a9a5bc6d. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1601.670075] env[62405]: DEBUG nova.network.neutron [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.680982] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947052, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.006437] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947051, 'name': CreateVM_Task, 'duration_secs': 1.325594} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.008992] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1602.011182] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.011357] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.011671] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1602.012077] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd65d479-d17f-4f44-9820-e2f8de8f6ad6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.017617] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1602.017617] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d2dba-8abb-02f7-e8e1-bd2a523edbd3" [ 1602.017617] env[62405]: _type = "Task" [ 1602.017617] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.028080] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d2dba-8abb-02f7-e8e1-bd2a523edbd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.102238] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.111155] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947057, 'name': ReconfigVM_Task, 'duration_secs': 0.3107} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.111458] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Reconfigured VM instance instance-0000001e to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1602.119018] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63839730-af54-4cba-af39-67729e291141 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.138898] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947058, 'name': PowerOffVM_Task, 'duration_secs': 0.295161} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.140314] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1602.144026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.144026] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1602.144026] env[62405]: value = "task-1947059" [ 1602.144026] env[62405]: _type = "Task" [ 1602.144026] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.144026] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e43d5ff1-3650-408d-9da8-cc4ec3011368 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.153107] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d748f20-53d2-4790-bdd5-91b1f578ff8b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.156356] env[62405]: INFO nova.compute.manager [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Took 35.74 seconds to build instance. [ 1602.163324] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947059, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.166460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dcb6af-e206-4a86-9783-f8a6512687a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.204517] env[62405]: DEBUG oslo_concurrency.lockutils [req-f9a5d0f0-41a2-46d1-aae1-c73c11c95829 req-6a1b0a08-1885-41d2-873d-50a101e9d7d1 service nova] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.206832] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090f5779-7b50-41c7-a93b-fffeaff97db1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.213065] env[62405]: DEBUG oslo_vmware.api [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947052, 'name': PowerOnVM_Task, 'duration_secs': 1.100521} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.213748] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1602.214029] env[62405]: INFO nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Took 12.49 seconds to spawn the instance on the hypervisor. 
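The entries above repeatedly show the same polling cycle: a request submits a vCenter task (CreateVM_Task, PowerOnVM_Task, ReconfigVM_Task, ...), wait_for_task blocks on it, _poll_task logs "progress is N%" until the task reports success, and the final line records duration_secs. The sketch below is only an illustration of that cycle under assumed names (poll_task, TaskInfo) and an assumed poll interval; it is not the oslo.vmware implementation.

# Illustrative sketch only: a simplified task-polling loop of the kind that
# produces the wait_for_task / "progress is N%" / "completed successfully"
# entries above. Names and the poll interval are assumptions for illustration.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str       # 'queued', 'running', 'success', or 'error'
    progress: int    # 0-100
    error: str = ""

def poll_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes, logging progress."""
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            print(f"completed successfully, duration_secs={time.monotonic() - start:.6f}")
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)

# Usage with a fake task that finishes on the third poll:
_states = iter([TaskInfo("running", 0), TaskInfo("running", 71), TaskInfo("success", 100)])
poll_task(lambda: next(_states), interval=0.01)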
[ 1602.214259] env[62405]: DEBUG nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1602.215039] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c58abfd-11f0-4458-b38a-a465497e824b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.221788] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430cff35-1866-4879-91ed-0d0272c22f92 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.244319] env[62405]: DEBUG nova.compute.provider_tree [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1602.246956] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.250581] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.250581] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Deleting the datastore file [datastore1] fbedaa93-5968-4b42-b93e-201d2b44b32b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.250581] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c717b94-4e62-4b3d-ac00-a606b249eee5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.254527] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for the task: (returnval){ [ 1602.254527] env[62405]: value = "task-1947061" [ 1602.254527] env[62405]: _type = "Task" [ 1602.254527] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.263322] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947061, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.530694] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d2dba-8abb-02f7-e8e1-bd2a523edbd3, 'name': SearchDatastore_Task, 'duration_secs': 0.024823} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.531038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.531291] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1602.531566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.531883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.531961] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1602.532226] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c80c49fd-4a1f-4607-be4a-0756382ef71f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.541092] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1602.541398] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1602.542127] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf97270-c508-4f04-9ac5-7dc1a69f32c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.549677] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1602.549677] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525d3914-a902-f52a-92db-88a6ff643a17" [ 1602.549677] env[62405]: _type = "Task" [ 1602.549677] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.557038] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525d3914-a902-f52a-92db-88a6ff643a17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.600866] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947056, 'name': ReconfigVM_Task, 'duration_secs': 0.641538} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.600866] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 0feaeb5d-9f4a-4166-99b1-f213bc4fa458/0feaeb5d-9f4a-4166-99b1-f213bc4fa458.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1602.601613] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-275e9531-9ee0-4b46-ab34-33316052c399 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.609041] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1602.609041] env[62405]: value = "task-1947062" [ 1602.609041] env[62405]: _type = "Task" [ 1602.609041] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.620198] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947062, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.653710] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947059, 'name': ReconfigVM_Task, 'duration_secs': 0.396891} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.654417] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401389', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'name': 'volume-442e8afb-d4f7-4db7-9a25-37612af22952', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '900b95b5-fe5a-46c1-909a-f81b82ced0ef', 'attached_at': '', 'detached_at': '', 'volume_id': '442e8afb-d4f7-4db7-9a25-37612af22952', 'serial': '442e8afb-d4f7-4db7-9a25-37612af22952'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1602.655035] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.656663] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de259249-0b49-44c0-8001-b0c9271042e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.660072] env[62405]: DEBUG oslo_concurrency.lockutils [None req-664be70a-5a90-415b-93d8-68e8e14e073c tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.088s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.666284] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.666284] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11f57f0d-8eba-41e7-b754-5330b7e2627f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.745593] env[62405]: INFO nova.compute.manager [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Took 43.66 seconds to build instance. 
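Several entries in this stretch record lock bookkeeping: a named lock is acquired with the time spent waiting, then released with the time it was held (for example the instance lock held 68.088s above). The following sketch only approximates that bookkeeping with a plain threading.Lock and print statements; it is not oslo_concurrency.lockutils, and the helper name and message format are assumptions.

# Illustrative sketch only: named-lock timing in the style of the
# 'acquired ... waited Xs' / '"released" ... held Ys' entries above.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - wait_start
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - held_start
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage: serialize two workers on the same named lock.
def worker(owner):
    with timed_lock("refresh_cache-23748dfd", owner):
        time.sleep(0.05)

threads = [threading.Thread(target=worker, args=(f"req-{i}",)) for i in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()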
[ 1602.749009] env[62405]: DEBUG nova.scheduler.client.report [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1602.766828] env[62405]: DEBUG oslo_vmware.api [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Task: {'id': task-1947061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.451856} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.767138] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.767359] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.767546] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.767718] env[62405]: INFO nova.compute.manager [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1602.767959] env[62405]: DEBUG oslo.service.loopingcall [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.768409] env[62405]: DEBUG nova.compute.manager [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1602.768511] env[62405]: DEBUG nova.network.neutron [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.833819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.834569] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.834969] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] 900b95b5-fe5a-46c1-909a-f81b82ced0ef {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.835664] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8f16049-bbbb-4449-a59b-ba36c0e95124 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.846980] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1602.846980] env[62405]: value = "task-1947064" [ 1602.846980] env[62405]: _type = "Task" [ 1602.846980] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.856916] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947064, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.067717] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525d3914-a902-f52a-92db-88a6ff643a17, 'name': SearchDatastore_Task, 'duration_secs': 0.01684} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.067717] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a648fcdc-0751-4183-8964-4be92c780f31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.072195] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1603.072195] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232c919-f3d4-05fd-9b02-500057314959" [ 1603.072195] env[62405]: _type = "Task" [ 1603.072195] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.082522] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232c919-f3d4-05fd-9b02-500057314959, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.119320] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947062, 'name': Rename_Task, 'duration_secs': 0.206836} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.119604] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1603.119840] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5231cd8-9080-4615-8a6c-8b088b08e8e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.122027] env[62405]: DEBUG nova.compute.manager [None req-f5eb06cf-8e41-4492-9b74-cab33d42a00f tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1603.122608] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41c9014-30d8-41d9-b99e-46820b6df2b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.133905] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1603.133905] env[62405]: value = "task-1947065" [ 1603.133905] env[62405]: _type = "Task" [ 1603.133905] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.144239] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.162689] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1603.248663] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb99e9f4-80bc-4e9c-aa52-358fd921963f tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.479s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.253640] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.862s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.255077] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1603.256736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.451s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.258175] env[62405]: INFO nova.compute.claims [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1603.308080] env[62405]: DEBUG nova.compute.manager [req-54d9760d-b02b-420a-9d6d-e64126b30cd8 req-5110d171-8661-44b2-8f83-86ba18c31962 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Received event network-vif-deleted-19c7164f-ee95-4382-907d-6f3d78608802 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1603.308430] env[62405]: INFO nova.compute.manager [req-54d9760d-b02b-420a-9d6d-e64126b30cd8 req-5110d171-8661-44b2-8f83-86ba18c31962 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Neutron deleted interface 19c7164f-ee95-4382-907d-6f3d78608802; detaching it from the instance and deleting it from the info cache [ 1603.308509] env[62405]: DEBUG nova.network.neutron [req-54d9760d-b02b-420a-9d6d-e64126b30cd8 req-5110d171-8661-44b2-8f83-86ba18c31962 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.352541] env[62405]: DEBUG nova.compute.manager [None req-5c4ac9ae-6923-451c-852f-f4b5ec1baf38 tempest-ServerDiagnosticsTest-1878494644 tempest-ServerDiagnosticsTest-1878494644-project-admin] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1603.354510] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f590ff66-9fbe-4af7-86f1-d2d94566e811 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.360115] env[62405]: DEBUG oslo_vmware.api [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.399671} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.360743] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1603.360966] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1603.361210] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1603.361416] env[62405]: INFO nova.compute.manager [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Took 2.48 seconds to destroy the instance on the hypervisor. [ 1603.361783] env[62405]: DEBUG oslo.service.loopingcall [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.362010] env[62405]: DEBUG nova.compute.manager [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1603.362117] env[62405]: DEBUG nova.network.neutron [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1603.365979] env[62405]: INFO nova.compute.manager [None req-5c4ac9ae-6923-451c-852f-f4b5ec1baf38 tempest-ServerDiagnosticsTest-1878494644 tempest-ServerDiagnosticsTest-1878494644-project-admin] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Retrieving diagnostics [ 1603.367071] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6810bff3-43ed-4068-b123-6637c1515b05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.402496] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "a73579d1-8647-49fe-98ce-0baffd1a558f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.402808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.402960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "a73579d1-8647-49fe-98ce-0baffd1a558f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.403175] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.403336] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.405631] env[62405]: INFO nova.compute.manager [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 
tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Terminating instance [ 1603.558224] env[62405]: DEBUG nova.network.neutron [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.586996] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232c919-f3d4-05fd-9b02-500057314959, 'name': SearchDatastore_Task, 'duration_secs': 0.018495} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.587297] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.587578] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 23748dfd-7c60-41db-8acb-7b49cf1c27db/23748dfd-7c60-41db-8acb-7b49cf1c27db.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1603.587859] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-410b4a92-1ca6-4f54-9e1f-4b067d2c0562 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.595394] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1603.595394] env[62405]: value = "task-1947066" [ 1603.595394] env[62405]: _type = "Task" [ 1603.595394] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.605967] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947066, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.635588] env[62405]: INFO nova.compute.manager [None req-f5eb06cf-8e41-4492-9b74-cab33d42a00f tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] instance snapshotting [ 1603.636287] env[62405]: DEBUG nova.objects.instance [None req-f5eb06cf-8e41-4492-9b74-cab33d42a00f tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lazy-loading 'flavor' on Instance uuid a73579d1-8647-49fe-98ce-0baffd1a558f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.649712] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947065, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.694706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.751493] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1603.764697] env[62405]: DEBUG nova.compute.utils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1603.770467] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1603.770467] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1603.813134] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cbae8a77-8649-4245-85b6-d1d268ca69d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.824722] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1e9d40-50ea-446c-b034-0cbb3779f1f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.866645] env[62405]: DEBUG nova.compute.manager [req-54d9760d-b02b-420a-9d6d-e64126b30cd8 req-5110d171-8661-44b2-8f83-86ba18c31962 service nova] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Detach interface failed, port_id=19c7164f-ee95-4382-907d-6f3d78608802, reason: Instance fbedaa93-5968-4b42-b93e-201d2b44b32b could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1603.868763] env[62405]: DEBUG nova.policy [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96a739701a824313b30b0d214f43757b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6014bab6bc9a4b059bab88e44b31f446', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1603.908998] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "refresh_cache-a73579d1-8647-49fe-98ce-0baffd1a558f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.909209] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquired lock "refresh_cache-a73579d1-8647-49fe-98ce-0baffd1a558f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.909386] env[62405]: DEBUG nova.network.neutron [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1604.060507] env[62405]: INFO nova.compute.manager [-] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Took 1.29 seconds to deallocate network for instance. 
[ 1604.111219] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947066, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.148650] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02ef745-9450-4f6a-aca8-bc0fe9c8c244 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.156198] env[62405]: DEBUG nova.network.neutron [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.157509] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947065, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.174841] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15a3929-5240-4167-95fc-a942bfce91e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.273436] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1604.280364] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.436235] env[62405]: DEBUG nova.network.neutron [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1604.439134] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Successfully created port: 165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1604.523072] env[62405]: DEBUG nova.network.neutron [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.578088] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.607790] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.608054] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.608275] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.608462] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.608628] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.610207] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739972} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.612950] env[62405]: INFO nova.compute.manager [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Terminating instance [ 1604.614440] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 23748dfd-7c60-41db-8acb-7b49cf1c27db/23748dfd-7c60-41db-8acb-7b49cf1c27db.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1604.614657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1604.615924] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c19d330e-414a-4da2-a2b9-4f9b78950098 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.623610] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1604.623610] env[62405]: value = "task-1947067" [ 1604.623610] env[62405]: _type = "Task" [ 1604.623610] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.634720] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.651722] env[62405]: DEBUG oslo_vmware.api [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947065, 'name': PowerOnVM_Task, 'duration_secs': 1.318374} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.652463] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1604.652463] env[62405]: INFO nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Took 12.13 seconds to spawn the instance on the hypervisor. [ 1604.652622] env[62405]: DEBUG nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.653523] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442226d3-a722-4399-a084-c0518068e351 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.659384] env[62405]: INFO nova.compute.manager [-] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Took 1.30 seconds to deallocate network for instance. [ 1604.687751] env[62405]: DEBUG nova.compute.manager [None req-f5eb06cf-8e41-4492-9b74-cab33d42a00f tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance disappeared during snapshot {{(pid=62405) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1604.869892] env[62405]: DEBUG nova.compute.manager [None req-f5eb06cf-8e41-4492-9b74-cab33d42a00f tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Found 0 images (rotation: 2) {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1604.904025] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0664745e-19c3-4224-9526-f4b073074f1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.913096] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408c0154-a711-4072-ada1-397302652ce1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.950845] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db4a7c7-5416-4651-9d04-d9ca8e2317af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.960413] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709e60d5-eeaf-4a11-b4ee-ab92500bae52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.975910] env[62405]: DEBUG nova.compute.provider_tree [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 
tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.026030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Releasing lock "refresh_cache-a73579d1-8647-49fe-98ce-0baffd1a558f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.026579] env[62405]: DEBUG nova.compute.manager [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1605.026816] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.027738] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15169388-f83a-418d-933e-c35284dd5a4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.037082] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.037376] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9617ee2a-df75-4f27-ac49-25a84ada4672 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.044492] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1605.044492] env[62405]: value = "task-1947068" [ 1605.044492] env[62405]: _type = "Task" [ 1605.044492] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.053419] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.119366] env[62405]: DEBUG nova.compute.manager [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1605.119632] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.120823] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54a7e4b-cd37-421b-b0c9-fecdb8a83bbd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.134887] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077303} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.137586] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1605.138666] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.139649] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06f2c2a-147a-46a1-9fea-f92502eb1cd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.142623] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a7306e4-894d-4727-921b-dc0fa9a5bf6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.167310] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 23748dfd-7c60-41db-8acb-7b49cf1c27db/23748dfd-7c60-41db-8acb-7b49cf1c27db.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1605.169868] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b62b41e0-65ba-46a4-889c-b43ebf08cbc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.193401] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1605.193401] env[62405]: value = "task-1947069" [ 1605.193401] env[62405]: _type = "Task" [ 1605.193401] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.199246] env[62405]: INFO nova.compute.manager [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Took 44.25 seconds to build instance. [ 1605.202615] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1605.202615] env[62405]: value = "task-1947070" [ 1605.202615] env[62405]: _type = "Task" [ 1605.202615] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.206987] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.215585] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947070, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.245255] env[62405]: INFO nova.compute.manager [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Took 0.58 seconds to detach 1 volumes for instance. [ 1605.289968] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1605.328447] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1605.329290] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1605.329844] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1605.330090] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1605.330382] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1605.332041] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1605.332041] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1605.332041] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1605.332041] 
env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1605.332041] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1605.332279] env[62405]: DEBUG nova.virt.hardware [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1605.333530] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ed6848-e6ff-449b-ad9f-09c3979fea83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.345819] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97e446b-4ac9-4424-9639-2edcdb9c207f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.479796] env[62405]: DEBUG nova.scheduler.client.report [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1605.554580] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947068, 'name': PowerOffVM_Task, 'duration_secs': 0.138997} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.555220] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.555220] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.555361] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-433aa0a8-d8e6-45de-beef-9d2e06083821 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.584286] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.584286] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.584286] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Deleting the datastore file [datastore1] a73579d1-8647-49fe-98ce-0baffd1a558f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.584286] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18246ec9-2180-445b-bb07-8c716c303b02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.591553] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for the task: (returnval){ [ 1605.591553] env[62405]: value = "task-1947072" [ 1605.591553] env[62405]: _type = "Task" [ 1605.591553] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.600572] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.627040] env[62405]: DEBUG nova.compute.manager [req-758b02a7-0204-414f-a4d3-54288f1c2798 req-27e6a610-4374-45d4-9466-dd22aab3d94f service nova] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Received event network-vif-deleted-cf9dc646-dc3f-46c3-9291-5f2caa585662 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1605.703416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282d5e02-5dab-4873-8ae3-e4c95ea610cd tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.769s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.709067] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947069, 'name': PowerOffVM_Task, 'duration_secs': 0.276263} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.713060] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.713309] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.713803] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ff9a3e7-ca86-412c-8a50-7b5da63e5ca0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.722452] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947070, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.752914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.826326] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.826326] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.826326] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Deleting the datastore file [datastore1] 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.826610] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-412f827e-e8bc-4ac1-ace9-31a04be06c3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.833653] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for the task: (returnval){ [ 1605.833653] env[62405]: value = "task-1947074" [ 1605.833653] env[62405]: _type = "Task" [ 1605.833653] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.842194] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947074, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.987597] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.987597] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1605.987889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.736s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.993019] env[62405]: INFO nova.compute.claims [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1606.104128] env[62405]: DEBUG oslo_vmware.api [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Task: {'id': task-1947072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154303} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.105358] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.108537] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.109083] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.109083] env[62405]: INFO nova.compute.manager [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1606.109219] env[62405]: DEBUG oslo.service.loopingcall [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.109780] env[62405]: DEBUG nova.compute.manager [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1606.109881] env[62405]: DEBUG nova.network.neutron [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.217611] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1606.226731] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947070, 'name': ReconfigVM_Task, 'duration_secs': 0.612837} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.229555] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1606.229950] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 23748dfd-7c60-41db-8acb-7b49cf1c27db/23748dfd-7c60-41db-8acb-7b49cf1c27db.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1606.231120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2734a0-2ee9-4441-a129-0d53d942c3d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.235491] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2041586-1252-4880-b221-6262be3e1945 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.238842] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk is in state: ready. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1606.239051] env[62405]: ERROR oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk due to incomplete transfer. [ 1606.239448] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b9a0c01f-d8c9-4806-8d62-5dc9ab280409 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.241978] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1606.241978] env[62405]: value = "task-1947075" [ 1606.241978] env[62405]: _type = "Task" [ 1606.241978] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.249223] env[62405]: DEBUG oslo_vmware.rw_handles [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d3c43c-bb71-e201-5bdf-a7ce93428a4c/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1606.249223] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Uploaded image e4cce3e1-804f-4642-8e51-dd15f688fec0 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1606.252665] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1606.252665] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e50265a2-f30e-4fbc-9592-1db8a3903d12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.257589] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947075, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.263918] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1606.263918] env[62405]: value = "task-1947076" [ 1606.263918] env[62405]: _type = "Task" [ 1606.263918] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.273106] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947076, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.282283] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Successfully updated port: 165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1606.347278] env[62405]: DEBUG oslo_vmware.api [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Task: {'id': task-1947074, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259248} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.348234] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.348234] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.348234] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.348548] env[62405]: INFO nova.compute.manager [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1606.349157] env[62405]: DEBUG oslo.service.loopingcall [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.349157] env[62405]: DEBUG nova.compute.manager [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1606.349316] env[62405]: DEBUG nova.network.neutron [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.397398] env[62405]: DEBUG nova.network.neutron [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1606.496574] env[62405]: DEBUG nova.compute.utils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1606.498098] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1606.498273] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1606.537018] env[62405]: DEBUG nova.compute.manager [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1606.537018] env[62405]: DEBUG nova.compute.manager [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing instance network info cache due to event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1606.537018] env[62405]: DEBUG oslo_concurrency.lockutils [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] Acquiring lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.537018] env[62405]: DEBUG oslo_concurrency.lockutils [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] Acquired lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.537018] env[62405]: DEBUG nova.network.neutron [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.571333] env[62405]: DEBUG nova.policy [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67bc4facd962452599db5f2f2b232997', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b8a0d474a3e4c6293a74944e354a7c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1606.749620] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.755755] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947075, 'name': Rename_Task, 'duration_secs': 0.191107} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.756020] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1606.756285] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fc5ad73-5828-4fc0-98d2-b6132b391a02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.763281] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1606.763281] env[62405]: value = "task-1947077" [ 1606.763281] env[62405]: _type = "Task" [ 1606.763281] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.774829] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.778008] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947076, 'name': Destroy_Task, 'duration_secs': 0.431722} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.778277] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Destroyed the VM [ 1606.778527] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1606.778759] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b93b03e1-6eb3-40f9-a6b6-7771071bd1c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.784897] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1606.784897] env[62405]: value = "task-1947078" [ 1606.784897] env[62405]: _type = "Task" [ 1606.784897] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.788497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.788624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.788772] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.796535] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947078, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.900856] env[62405]: DEBUG nova.network.neutron [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.002109] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1607.159574] env[62405]: DEBUG nova.network.neutron [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.276236] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947077, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.294202] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947078, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.327184] env[62405]: DEBUG nova.network.neutron [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updated VIF entry in instance network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1607.327754] env[62405]: DEBUG nova.network.neutron [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.402270] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Successfully created port: 580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1607.407509] env[62405]: INFO nova.compute.manager [-] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Took 1.30 seconds to deallocate network for instance. [ 1607.425415] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1607.583420] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.585897] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.585897] env[62405]: INFO nova.compute.manager [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Rebooting instance [ 1607.619018] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961c6ef4-6b3b-4354-b55c-8c376ac64160 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.632253] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd7577c-a514-4946-8340-39a64ccdb0f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.669622] env[62405]: INFO nova.compute.manager [-] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Took 1.32 seconds to deallocate network for instance. [ 1607.679021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6830acf4-8fbc-4ddd-afeb-4b1164d8d8fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.685897] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d5d671-5a8f-4d63-a969-b511abf2708d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.702479] env[62405]: DEBUG nova.compute.provider_tree [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1607.774357] env[62405]: DEBUG oslo_vmware.api [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947077, 'name': PowerOnVM_Task, 'duration_secs': 0.573464} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.774774] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1607.774836] env[62405]: INFO nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Took 9.78 seconds to spawn the instance on the hypervisor. [ 1607.774970] env[62405]: DEBUG nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1607.775732] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7f5708-c346-4eb1-8f1b-a2801de4189b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.795280] env[62405]: DEBUG oslo_vmware.api [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947078, 'name': RemoveSnapshot_Task, 'duration_secs': 0.520672} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.795521] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1607.795744] env[62405]: INFO nova.compute.manager [None req-9d6ef053-41e4-4b22-9fea-f4aaa6f8dc94 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 18.11 seconds to snapshot the instance on the hypervisor. 
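The recurring "Waiting for the task ... / Task: {'id': task-..., 'name': ...} progress is N% / ... completed successfully" entries above reflect a poll-until-complete loop around vCenter tasks. The following is a minimal, self-contained sketch of that pattern only (plain standard-library Python, hypothetical names; it is not the oslo.vmware implementation and makes no claim about its internals):

    import time

    # Illustrative poll-until-complete loop, hypothetical names only.
    # get_task_info is assumed to return a dict such as
    # {'state': 'running', 'progress': 42} or {'state': 'success'}.
    class TaskTimeout(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            state = info.get('state')
            if state == 'success':
                return info
            if state == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # Corresponds to the "progress is 0% ... 89% ... 100%" log lines.
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)
        raise TaskTimeout("task did not complete within %ss" % timeout)
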
[ 1607.831753] env[62405]: DEBUG oslo_concurrency.lockutils [req-2862f288-2748-4a28-8478-5dca82bfe26c req-2a5ec876-432c-49e6-97e7-82aac518615d service nova] Releasing lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.836404] env[62405]: DEBUG nova.network.neutron [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updating instance_info_cache with network_info: [{"id": "165104f7-de0a-47de-a4a4-918b51216f4d", "address": "fa:16:3e:d3:c8:b8", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165104f7-de", "ovs_interfaceid": "165104f7-de0a-47de-a4a4-918b51216f4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.907158] env[62405]: DEBUG nova.compute.manager [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Received event network-vif-plugged-165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1607.907158] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.907158] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.907158] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.907158] env[62405]: DEBUG nova.compute.manager 
[req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] No waiting events found dispatching network-vif-plugged-165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1607.907660] env[62405]: WARNING nova.compute.manager [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Received unexpected event network-vif-plugged-165104f7-de0a-47de-a4a4-918b51216f4d for instance with vm_state building and task_state spawning. [ 1607.907660] env[62405]: DEBUG nova.compute.manager [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Received event network-changed-165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1607.907660] env[62405]: DEBUG nova.compute.manager [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Refreshing instance network info cache due to event network-changed-165104f7-de0a-47de-a4a4-918b51216f4d. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1607.907853] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Acquiring lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.915383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.012317] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1608.037840] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1608.038095] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.039247] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1608.039247] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.039247] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1608.039247] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1608.039247] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1608.039516] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1608.039714] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1608.039885] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1608.040221] env[62405]: DEBUG nova.virt.hardware [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1608.041262] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9490b237-97bc-4980-822f-fe6b14084aa6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.050781] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d143a1e7-1e8d-493b-b19d-4e66c251c7bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.110996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.110996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquired lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.110996] env[62405]: DEBUG nova.network.neutron [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1608.181396] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.206373] env[62405]: DEBUG nova.scheduler.client.report [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1608.294968] env[62405]: INFO nova.compute.manager [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Took 40.43 seconds to build instance. [ 1608.339084] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.339441] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Instance network_info: |[{"id": "165104f7-de0a-47de-a4a4-918b51216f4d", "address": "fa:16:3e:d3:c8:b8", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165104f7-de", "ovs_interfaceid": "165104f7-de0a-47de-a4a4-918b51216f4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1608.340954] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Acquired lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.340954] env[62405]: DEBUG nova.network.neutron [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Refreshing network info cache for port 165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.342071] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 
tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:c8:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '165104f7-de0a-47de-a4a4-918b51216f4d', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.349458] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating folder: Project (6014bab6bc9a4b059bab88e44b31f446). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1608.350640] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e38a557-2956-427f-9b12-a01a6738c388 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.363266] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created folder: Project (6014bab6bc9a4b059bab88e44b31f446) in parent group-v401284. [ 1608.363266] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating folder: Instances. Parent ref: group-v401399. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1608.363266] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6164226-621c-46f0-8018-656fd0ce9bd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.373241] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created folder: Instances in parent group-v401399. [ 1608.373480] env[62405]: DEBUG oslo.service.loopingcall [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.373543] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.373737] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9838397d-4c72-4c53-80c4-09321e88e3de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.395023] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.395023] env[62405]: value = "task-1947081" [ 1608.395023] env[62405]: _type = "Task" [ 1608.395023] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.404210] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947081, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.712279] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.712964] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1608.716482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.094s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.716763] env[62405]: DEBUG nova.objects.instance [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lazy-loading 'resources' on Instance uuid 65462c7a-372e-4ba6-8f6d-e300080d65d0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.797038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84c1a9c9-b820-4a6e-adea-e71b036acf13 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.622s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.905444] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947081, 'name': CreateVM_Task, 'duration_secs': 0.353156} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.905900] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.906557] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.906721] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.907466] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.907466] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cabc7b6-466d-439c-a5b9-6bfc5fedb96b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.912470] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1608.912470] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520576a5-e5be-098b-2816-7f1ddcbd001b" [ 1608.912470] env[62405]: _type = "Task" [ 1608.912470] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.921031] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520576a5-e5be-098b-2816-7f1ddcbd001b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.986648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "377365a4-7538-4bab-a181-1940e6fb4066" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.986648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.008188] env[62405]: DEBUG nova.network.neutron [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.194213] env[62405]: DEBUG nova.network.neutron [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updated VIF entry in instance network info cache for port 165104f7-de0a-47de-a4a4-918b51216f4d. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1609.194605] env[62405]: DEBUG nova.network.neutron [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updating instance_info_cache with network_info: [{"id": "165104f7-de0a-47de-a4a4-918b51216f4d", "address": "fa:16:3e:d3:c8:b8", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165104f7-de", "ovs_interfaceid": "165104f7-de0a-47de-a4a4-918b51216f4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.220282] env[62405]: DEBUG nova.compute.utils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.221754] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1609.221925] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.299291] env[62405]: DEBUG nova.policy [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1609.301253] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1609.383811] env[62405]: DEBUG nova.compute.manager [req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Received event network-vif-plugged-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1609.384066] env[62405]: DEBUG oslo_concurrency.lockutils [req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] Acquiring lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.384338] env[62405]: DEBUG oslo_concurrency.lockutils [req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.384415] env[62405]: DEBUG oslo_concurrency.lockutils [req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.384580] env[62405]: DEBUG nova.compute.manager [req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] No waiting events found dispatching network-vif-plugged-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1609.384739] env[62405]: WARNING nova.compute.manager 
[req-681c954d-dacd-42b6-9136-fc32bab00609 req-ac4be858-06b8-41e9-ab81-e02f74c7f9c9 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Received unexpected event network-vif-plugged-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 for instance with vm_state building and task_state spawning. [ 1609.423461] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520576a5-e5be-098b-2816-7f1ddcbd001b, 'name': SearchDatastore_Task, 'duration_secs': 0.009684} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.423461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.424526] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.424526] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.424526] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.424526] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.429897] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8ef53aa-51c8-4d6d-9a29-5f99b7effd6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.438056] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.438254] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 
tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.439055] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47420cc0-90af-4692-b5a1-e9cd477cc01c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.448499] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Successfully updated port: 580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1609.454720] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1609.454720] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c871a-f328-4dfe-555e-7ccc211989ea" [ 1609.454720] env[62405]: _type = "Task" [ 1609.454720] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.465959] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c871a-f328-4dfe-555e-7ccc211989ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009404} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.469746] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-405af06a-6c60-452b-9f9d-968e7dc12b9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.475656] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1609.475656] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ddcaf0-0f8c-b4a1-2d1b-395784c5c7f6" [ 1609.475656] env[62405]: _type = "Task" [ 1609.475656] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.487110] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ddcaf0-0f8c-b4a1-2d1b-395784c5c7f6, 'name': SearchDatastore_Task, 'duration_secs': 0.008663} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.489665] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.489932] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 67bf25ea-5774-4246-a3e6-2aeb0ebf6731/67bf25ea-5774-4246-a3e6-2aeb0ebf6731.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.491385] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4afc91c3-0ce4-4b99-bb74-8f5aee576df2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.498034] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1609.498034] env[62405]: value = "task-1947082" [ 1609.498034] env[62405]: _type = "Task" [ 1609.498034] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.505642] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947082, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.512528] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Releasing lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.703144] env[62405]: DEBUG oslo_concurrency.lockutils [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] Releasing lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.703144] env[62405]: DEBUG nova.compute.manager [req-600d5b80-0e60-4295-92d4-96e96eac0a88 req-58b00e37-c8db-473e-8614-ce920d95ef7d service nova] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Received event network-vif-deleted-0db09e81-ebc5-4f46-bed2-99bdd6a93b15 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1609.717647] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Successfully created port: d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1609.727551] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1609.830392] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.915330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec47b6e-3df9-4d6d-aa34-074e0f5e66fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.923354] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10a7dcd-1f9e-40d0-88a7-591628a90747 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.965168] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.965348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquired lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.965514] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.967557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291e5ca7-ca33-4481-8643-e1456e02aa17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.979453] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f4f479-8981-4222-88cd-abd81a723dea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.997645] env[62405]: DEBUG nova.compute.provider_tree [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.001940] env[62405]: DEBUG nova.compute.manager [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-changed-666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1610.003064] env[62405]: DEBUG 
nova.compute.manager [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing instance network info cache due to event network-changed-666e898c-754c-4b07-b0d9-dac2a9a5bc6d. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1610.003064] env[62405]: DEBUG oslo_concurrency.lockutils [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.003219] env[62405]: DEBUG oslo_concurrency.lockutils [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.003555] env[62405]: DEBUG nova.network.neutron [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing network info cache for port 666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1610.014457] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496869} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.018229] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 67bf25ea-5774-4246-a3e6-2aeb0ebf6731/67bf25ea-5774-4246-a3e6-2aeb0ebf6731.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.018229] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.018229] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd8574eb-d484-4243-b0af-ec57e7d698c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.020264] env[62405]: DEBUG nova.compute.manager [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1610.023554] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac02ef2-cc88-416b-9bc3-27f12d408afd {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.033473] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1610.033473] env[62405]: value = "task-1947083" [ 1610.033473] env[62405]: _type = "Task" [ 1610.033473] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.041381] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947083, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.506709] env[62405]: DEBUG nova.scheduler.client.report [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1610.512641] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1610.547911] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064727} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.548336] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1610.549156] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e23ffc-c9c0-4f00-b1b2-97ef8e88556e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.578106] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 67bf25ea-5774-4246-a3e6-2aeb0ebf6731/67bf25ea-5774-4246-a3e6-2aeb0ebf6731.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1610.579535] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd624a12-0228-499b-80b7-d436b3988ba9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.603391] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1610.603391] env[62405]: value = "task-1947084" [ 1610.603391] env[62405]: _type = "Task" [ 1610.603391] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.614060] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947084, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.740053] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1610.765337] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1610.765666] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.766841] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1610.767127] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.767310] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1610.767508] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1610.767777] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1610.767987] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1610.768225] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1610.768426] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1610.768658] env[62405]: DEBUG nova.virt.hardware [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1610.769590] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0246e33a-d2e0-46ba-bc3d-4134cbc85279 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.777529] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4304530-028c-4978-b7b0-5a232d4a4a90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.015908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.299s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.018463] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.904s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.045499] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a99a62-682c-4678-a5d3-2253483aa416 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.049803] env[62405]: INFO nova.scheduler.client.report [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleted allocations for instance 65462c7a-372e-4ba6-8f6d-e300080d65d0 [ 1611.055767] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Doing hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1611.055981] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8ea0ce17-2a8c-4360-9951-fcdf02dfc939 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.062751] env[62405]: DEBUG oslo_vmware.api [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 
tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1611.062751] env[62405]: value = "task-1947085" [ 1611.062751] env[62405]: _type = "Task" [ 1611.062751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.075025] env[62405]: DEBUG oslo_vmware.api [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947085, 'name': ResetVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.089466] env[62405]: DEBUG nova.network.neutron [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Updating instance_info_cache with network_info: [{"id": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "address": "fa:16:3e:9d:fe:7d", "network": {"id": "729df6bd-1191-4437-881f-56694dca7c0e", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-55656275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8a0d474a3e4c6293a74944e354a7c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580e1cca-cd", "ovs_interfaceid": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.119544] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947084, 'name': ReconfigVM_Task, 'duration_secs': 0.345383} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.120785] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 67bf25ea-5774-4246-a3e6-2aeb0ebf6731/67bf25ea-5774-4246-a3e6-2aeb0ebf6731.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.120785] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d650b8e-38f3-46c4-b719-19e873827e51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.128686] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1611.128686] env[62405]: value = "task-1947086" [ 1611.128686] env[62405]: _type = "Task" [ 1611.128686] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.138492] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947086, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.202422] env[62405]: DEBUG nova.network.neutron [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updated VIF entry in instance network info cache for port 666e898c-754c-4b07-b0d9-dac2a9a5bc6d. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1611.202792] env[62405]: DEBUG nova.network.neutron [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.331630] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Successfully updated port: d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1611.534818] env[62405]: DEBUG nova.compute.manager [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Received event network-changed-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1611.534935] env[62405]: DEBUG nova.compute.manager [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Refreshing instance network info cache due to event network-changed-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1611.535548] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] Acquiring lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.563027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-588d5a69-c10c-4bb0-a7e8-764f960d4d1a tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.904s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.563027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 24.115s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.563027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.563027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.563348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.564020] env[62405]: INFO nova.compute.manager [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Terminating instance [ 1611.575561] env[62405]: DEBUG oslo_vmware.api [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947085, 'name': ResetVM_Task, 'duration_secs': 0.118072} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.576114] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Did hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1611.576469] env[62405]: DEBUG nova.compute.manager [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1611.577773] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167f6b3f-5901-479e-b3c7-2f7b5db6afe1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.596226] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Releasing lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.596226] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Instance network_info: |[{"id": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "address": "fa:16:3e:9d:fe:7d", "network": {"id": "729df6bd-1191-4437-881f-56694dca7c0e", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-55656275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8a0d474a3e4c6293a74944e354a7c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580e1cca-cd", "ovs_interfaceid": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1611.596444] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] Acquired lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.596444] env[62405]: DEBUG nova.network.neutron [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] [instance: 
3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Refreshing network info cache for port 580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1611.597686] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:fe:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60bdba1a-14cf-46b2-9d8b-aeaf4d80c815', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1611.606135] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Creating folder: Project (9b8a0d474a3e4c6293a74944e354a7c4). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.607899] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32304500-39a8-4fc4-85c9-b22854d54f54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.620016] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Created folder: Project (9b8a0d474a3e4c6293a74944e354a7c4) in parent group-v401284. [ 1611.620016] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Creating folder: Instances. Parent ref: group-v401402. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.620016] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc9375e3-8211-4cbd-b758-95ca06d9d995 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.630497] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Created folder: Instances in parent group-v401402. [ 1611.631035] env[62405]: DEBUG oslo.service.loopingcall [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.633710] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1611.633942] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd6360a6-92a2-4781-a826-b4c468b72acf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.654142] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947086, 'name': Rename_Task, 'duration_secs': 0.145841} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.655422] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1611.655654] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1611.655654] env[62405]: value = "task-1947089" [ 1611.655654] env[62405]: _type = "Task" [ 1611.655654] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.655827] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89a5330b-bc48-45f1-925d-2ae43d83f1c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.666596] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947089, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.667938] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1611.667938] env[62405]: value = "task-1947090" [ 1611.667938] env[62405]: _type = "Task" [ 1611.667938] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.676546] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947090, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.706063] env[62405]: DEBUG oslo_concurrency.lockutils [req-2daf9ea9-b3f3-49a0-8949-3d21072728b8 req-f74a2717-a763-41a9-a193-0c613a1cb66e service nova] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.834199] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.834356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.834530] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.061008] env[62405]: DEBUG nova.compute.manager [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Received event network-vif-plugged-d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1612.061302] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Acquiring lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.061550] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.061730] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.061905] env[62405]: DEBUG nova.compute.manager [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] No waiting events found dispatching network-vif-plugged-d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1612.062211] env[62405]: WARNING nova.compute.manager 
[req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Received unexpected event network-vif-plugged-d7c38983-3ca5-4934-af4a-1bf5f845ec9a for instance with vm_state building and task_state spawning. [ 1612.062758] env[62405]: DEBUG nova.compute.manager [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Received event network-changed-d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1612.062758] env[62405]: DEBUG nova.compute.manager [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Refreshing instance network info cache due to event network-changed-d7c38983-3ca5-4934-af4a-1bf5f845ec9a. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1612.063071] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Acquiring lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.064198] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 262424b0-dc7d-4b6c-9539-2d6cd23a93da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064351] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0491dc4b-cf35-4035-aca9-baf43b86af7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064478] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9b71f962-2b92-4f7b-bb8d-b50da5130018 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064723] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b8ff115b-64f1-4584-afa2-478c5e6b726b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064723] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3f9849b8-6aaa-4d32-b140-207d5b54d68f actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064849] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 792cd2c8-a67d-4b16-93ab-722fcc8b622d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.064927] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b3647042-89a1-4d15-b85e-49a5c8def1d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065070] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance fbedaa93-5968-4b42-b93e-201d2b44b32b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.065217] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 777ddb84-25b9-4da6-be6b-a2289dbf510a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065340] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f8c6f99f-499f-4886-aae9-5f08969175f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065450] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b21dc1e7-dacd-4154-9bc3-0fa3774695a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065578] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ca4d11fe-1d0f-468b-a2f4-21c5b84342ab is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.065693] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 15218373-ffa5-49ce-b604-423b7fc5fb35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065806] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 058682a1-5240-4414-9203-c612ecd12999 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.065928] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance e8ed73c3-fb86-42c3-aae6-b0c8d03149ce is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.066055] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.066185] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 14dab775-19b4-4d0d-a7ee-67705f7e45ca is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.066308] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 900b95b5-fe5a-46c1-909a-f81b82ced0ef is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.066446] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.066576] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.066689] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0feaeb5d-9f4a-4166-99b1-f213bc4fa458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.066808] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a73579d1-8647-49fe-98ce-0baffd1a558f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1612.066919] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 23748dfd-7c60-41db-8acb-7b49cf1c27db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.067045] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.067164] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.067277] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4d59d9fd-23df-4933-97ed-32602e51e9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1612.070568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.070728] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquired lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.070898] env[62405]: DEBUG nova.network.neutron [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.090286] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00f6c200-c770-4640-9621-b9bd0353c76a tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.506s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.169965] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947089, 'name': CreateVM_Task, 'duration_secs': 0.329013} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.174247] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1612.175580] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.175820] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.176267] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1612.177045] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f0baeb-0ebe-455d-9b60-6c9059f3905c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.184644] env[62405]: DEBUG oslo_vmware.api [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947090, 'name': PowerOnVM_Task, 'duration_secs': 0.495457} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.186381] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1612.186690] env[62405]: INFO nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Took 6.90 seconds to spawn the instance on the hypervisor. 
[ 1612.186953] env[62405]: DEBUG nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1612.187423] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1612.187423] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52856d50-3e3a-8354-d064-7bed48044ffa" [ 1612.187423] env[62405]: _type = "Task" [ 1612.187423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.188417] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e54c270-615b-4f7a-96ee-40d84972d4f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.208482] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52856d50-3e3a-8354-d064-7bed48044ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.254724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.255009] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.255234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.255420] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.255589] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.257592] env[62405]: INFO nova.compute.manager [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Terminating instance [ 1612.386633] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.395781] env[62405]: DEBUG nova.network.neutron [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Updated VIF entry in instance network info cache for port 580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1612.396133] env[62405]: DEBUG nova.network.neutron [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Updating instance_info_cache with network_info: [{"id": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "address": "fa:16:3e:9d:fe:7d", "network": {"id": "729df6bd-1191-4437-881f-56694dca7c0e", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-55656275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b8a0d474a3e4c6293a74944e354a7c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60bdba1a-14cf-46b2-9d8b-aeaf4d80c815", "external-id": "nsx-vlan-transportzone-922", "segmentation_id": 922, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap580e1cca-cd", "ovs_interfaceid": "580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.569857] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance d5686d7c-a73f-4e02-8726-eab8221a0eae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1612.573228] env[62405]: DEBUG nova.compute.utils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Can not refresh info_cache because instance was not found {{(pid=62405) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 1612.590535] env[62405]: DEBUG nova.network.neutron [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.614464] env[62405]: DEBUG nova.network.neutron [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updating instance_info_cache with network_info: [{"id": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "address": "fa:16:3e:12:b6:c9", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7c38983-3c", "ovs_interfaceid": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.669768] env[62405]: DEBUG nova.network.neutron [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.704108] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52856d50-3e3a-8354-d064-7bed48044ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.017355} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.704435] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.704664] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1612.704895] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.705063] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.705268] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1612.705521] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5ce3fc5-c0b0-4a08-b797-81cbadbeaeae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.718020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.718020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.718020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa 
tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.718020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.718284] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.719464] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1612.719779] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1612.720809] env[62405]: INFO nova.compute.manager [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Terminating instance [ 1612.729041] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a6aca0a-3f53-4645-8de0-d5c71b84a097 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.742073] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1612.742073] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520eb380-4de0-88af-5bba-23784a450ec4" [ 1612.742073] env[62405]: _type = "Task" [ 1612.742073] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.751449] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520eb380-4de0-88af-5bba-23784a450ec4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.754188] env[62405]: INFO nova.compute.manager [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Took 41.50 seconds to build instance. [ 1612.761038] env[62405]: DEBUG nova.compute.manager [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1612.761215] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1612.762008] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1685b313-3e0a-4dbe-a565-49650da3f953 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.770740] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1612.771484] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64f38c06-1871-4b0c-b98e-a1f9c61bbf06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.778976] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1612.778976] env[62405]: value = "task-1947091" [ 1612.778976] env[62405]: _type = "Task" [ 1612.778976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.788049] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.899249] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9288f8f-264f-461a-bffe-a0a1be758903 req-3fa68995-2275-4e14-9783-a463a4e4a7e0 service nova] Releasing lock "refresh_cache-3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.073750] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1613.117246] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.117552] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Instance network_info: |[{"id": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "address": "fa:16:3e:12:b6:c9", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7c38983-3c", "ovs_interfaceid": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1613.117835] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Acquired lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.118042] env[62405]: DEBUG nova.network.neutron [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Refreshing network info cache for port d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.119108] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:b6:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7c38983-3ca5-4934-af4a-1bf5f845ec9a', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1613.127262] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 
tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating folder: Project (5ba2fba100b943a2a415ec37b9365388). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.130361] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3d4011f-b3a9-4fb0-9393-db3838c65d8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.142392] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created folder: Project (5ba2fba100b943a2a415ec37b9365388) in parent group-v401284. [ 1613.142588] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating folder: Instances. Parent ref: group-v401405. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.142824] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2306d611-dbff-463a-8a77-ca8ce92a9f3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.153263] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created folder: Instances in parent group-v401405. [ 1613.153511] env[62405]: DEBUG oslo.service.loopingcall [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.153701] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1613.153908] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9337e08-fd7c-48f5-87b6-489332617a59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.176007] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Releasing lock "refresh_cache-65462c7a-372e-4ba6-8f6d-e300080d65d0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.176431] env[62405]: DEBUG nova.compute.manager [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1613.176621] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1613.176904] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e50b1a25-4c30-4237-83a2-8bf6849590ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.188279] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021dfeeb-19f9-4359-8efa-e9230cf51665 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.199845] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.199845] env[62405]: value = "task-1947094" [ 1613.199845] env[62405]: _type = "Task" [ 1613.199845] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.208728] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947094, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.224810] env[62405]: WARNING nova.virt.vmwareapi.vmops [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 65462c7a-372e-4ba6-8f6d-e300080d65d0 could not be found. [ 1613.225064] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1613.225861] env[62405]: INFO nova.compute.manager [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1613.225861] env[62405]: DEBUG oslo.service.loopingcall [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.227935] env[62405]: DEBUG nova.compute.manager [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1613.228043] env[62405]: DEBUG nova.network.neutron [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1613.240153] env[62405]: DEBUG nova.compute.manager [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1613.240153] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1613.240460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b581b6-6de6-435f-9afe-f5d35806ff28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.255477] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1613.255477] env[62405]: DEBUG nova.network.neutron [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1613.255477] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d807ec7-6610-4102-a572-8daf32cb95e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.261172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b565dfe4-0dbd-4e2e-81af-c327d72ebdc2 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.200s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.261514] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520eb380-4de0-88af-5bba-23784a450ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.029004} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.264727] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553af1d9-91a0-416b-91a2-e00ea4159108 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.268913] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1613.268913] env[62405]: value = "task-1947095" [ 1613.268913] env[62405]: _type = "Task" [ 1613.268913] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.274704] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1613.274704] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5251cedc-8ebb-ac23-8343-6977275da013" [ 1613.274704] env[62405]: _type = "Task" [ 1613.274704] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.281698] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.289437] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5251cedc-8ebb-ac23-8343-6977275da013, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.297141] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947091, 'name': PowerOffVM_Task, 'duration_secs': 0.362486} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.297403] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.297579] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1613.297829] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd225fe2-fcf2-47a2-857f-28b441b4823a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.374489] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1613.374489] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1613.374716] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleting the datastore file [datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1613.374967] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8538a1ac-32b2-4da3-b87a-d5f0fb71f536 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.382509] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1613.382509] env[62405]: value = "task-1947097" [ 1613.382509] env[62405]: _type = "Task" [ 1613.382509] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.391189] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947097, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.395882] env[62405]: DEBUG nova.network.neutron [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updated VIF entry in instance network info cache for port d7c38983-3ca5-4934-af4a-1bf5f845ec9a. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.396264] env[62405]: DEBUG nova.network.neutron [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updating instance_info_cache with network_info: [{"id": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "address": "fa:16:3e:12:b6:c9", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7c38983-3c", "ovs_interfaceid": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.578584] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1613.709622] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947094, 'name': CreateVM_Task, 'duration_secs': 0.400344} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.709799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1613.710453] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.710677] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.710944] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1613.711206] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a11386b-2738-437b-adc4-87992ebb5b73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.716319] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1613.716319] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d53958-18be-ec50-cb05-04c35f6dc477" [ 1613.716319] env[62405]: _type = "Task" [ 1613.716319] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.723852] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d53958-18be-ec50-cb05-04c35f6dc477, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.762328] env[62405]: DEBUG nova.network.neutron [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.765137] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1613.783195] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947095, 'name': PowerOffVM_Task, 'duration_secs': 0.219686} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.783816] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1613.783994] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1613.784529] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc2fa6d3-5d34-448d-9b4e-84d8013e5907 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.791437] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5251cedc-8ebb-ac23-8343-6977275da013, 'name': SearchDatastore_Task, 'duration_secs': 0.023098} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.792311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.792992] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8/3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1613.793257] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ac6f5fb-1668-45ee-b7bb-2a39eb0754c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.800738] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1613.800738] env[62405]: value = "task-1947099" [ 1613.800738] env[62405]: _type = "Task" [ 1613.800738] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.809130] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.849084] env[62405]: DEBUG nova.compute.manager [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1613.849291] env[62405]: DEBUG nova.compute.manager [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing instance network info cache due to event network-changed-feb1471c-63ad-4e63-bd9d-e413dee50694. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1613.849559] env[62405]: DEBUG oslo_concurrency.lockutils [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] Acquiring lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.849653] env[62405]: DEBUG oslo_concurrency.lockutils [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] Acquired lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.850066] env[62405]: DEBUG nova.network.neutron [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Refreshing network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.892881] env[62405]: DEBUG oslo_vmware.api [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246162} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.893517] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1613.893744] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1613.893929] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1613.894154] env[62405]: INFO nova.compute.manager [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1613.894418] env[62405]: DEBUG oslo.service.loopingcall [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.894601] env[62405]: DEBUG nova.compute.manager [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1613.894695] env[62405]: DEBUG nova.network.neutron [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1613.899193] env[62405]: DEBUG oslo_concurrency.lockutils [req-09780f71-787e-4c31-94ca-de557b4c8e96 req-3a5d90d2-2c22-4125-b761-d9a5c51e2389 service nova] Releasing lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.977670] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1613.978259] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1613.978712] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleting the datastore file [datastore1] 262424b0-dc7d-4b6c-9539-2d6cd23a93da {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1613.979177] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f8de317-9a6e-4ec2-8fc5-f21b30f56d3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.989047] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for the task: (returnval){ [ 1613.989047] env[62405]: value = "task-1947100" [ 1613.989047] env[62405]: _type = "Task" [ 1613.989047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.998047] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.011678] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.012411] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.014614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.014614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.014614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.016164] env[62405]: INFO nova.compute.manager [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Terminating instance [ 1614.082625] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.227379] env[62405]: DEBUG nova.compute.manager [req-99c5ed45-20e0-439f-b7e1-a153ad43c043 req-a9f0006e-43a0-4b71-97b1-28b20b8bd599 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Received event network-vif-deleted-2ced9062-28c5-4183-a8d8-397cd40c9130 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1614.227763] env[62405]: INFO nova.compute.manager [req-99c5ed45-20e0-439f-b7e1-a153ad43c043 req-a9f0006e-43a0-4b71-97b1-28b20b8bd599 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Neutron deleted interface 2ced9062-28c5-4183-a8d8-397cd40c9130; detaching it from the instance and deleting it from the info cache [ 1614.228861] env[62405]: DEBUG nova.network.neutron [req-99c5ed45-20e0-439f-b7e1-a153ad43c043 req-a9f0006e-43a0-4b71-97b1-28b20b8bd599 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.234205] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d53958-18be-ec50-cb05-04c35f6dc477, 'name': SearchDatastore_Task, 'duration_secs': 0.041582} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.234636] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.234993] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.235410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.238418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.238418] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.238418] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-569269c7-03fd-4c6f-aa2f-13de505465d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.258418] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.258598] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1614.259551] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5da2ec4-9aa8-4042-9f45-3f9019e26edc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.264839] env[62405]: INFO nova.compute.manager [-] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Took 1.04 seconds to deallocate network for instance. [ 1614.267753] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1614.267753] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d882c-6789-6c01-ebb6-fe6ef92bd1bc" [ 1614.267753] env[62405]: _type = "Task" [ 1614.267753] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.284671] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d882c-6789-6c01-ebb6-fe6ef92bd1bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.297844] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.312104] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.497981] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.523858] env[62405]: DEBUG nova.compute.manager [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1614.523858] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1614.523858] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381e20b1-ccc6-4960-a982-d1f5a7e81f57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.539285] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1614.542601] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a45e84e3-c1f1-4d3d-b352-84a33823bbd0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.551167] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1614.551167] env[62405]: value = "task-1947101" [ 1614.551167] env[62405]: _type = "Task" [ 1614.551167] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.562978] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.580357] env[62405]: DEBUG nova.network.neutron [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updated VIF entry in instance network info cache for port feb1471c-63ad-4e63-bd9d-e413dee50694. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1614.580724] env[62405]: DEBUG nova.network.neutron [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [{"id": "feb1471c-63ad-4e63-bd9d-e413dee50694", "address": "fa:16:3e:17:75:40", "network": {"id": "bf574ed2-2a7e-4cf2-aa38-0adccf456674", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2099360932-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cf1f39c8aef41df8c86777f80980664", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfeb1471c-63", "ovs_interfaceid": "feb1471c-63ad-4e63-bd9d-e413dee50694", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.584906] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance c392d6f3-b638-4857-826d-760c38b7d291 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1614.678963] env[62405]: DEBUG nova.network.neutron [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.730778] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1a7a4be-247f-47c4-9a87-4222f2ce5d46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.741435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d242b5-eee2-437f-9a72-3e69e8b2d25c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.776350] env[62405]: DEBUG nova.compute.manager [req-99c5ed45-20e0-439f-b7e1-a153ad43c043 req-a9f0006e-43a0-4b71-97b1-28b20b8bd599 service nova] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Detach interface failed, port_id=2ced9062-28c5-4183-a8d8-397cd40c9130, reason: Instance b3647042-89a1-4d15-b85e-49a5c8def1d4 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1614.779761] env[62405]: INFO nova.compute.manager [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance disappeared during terminate [ 1614.779967] env[62405]: DEBUG oslo_concurrency.lockutils [None req-68275af2-ae8f-46ab-9c97-43ca2e39b453 tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "65462c7a-372e-4ba6-8f6d-e300080d65d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.219s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.786703] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d882c-6789-6c01-ebb6-fe6ef92bd1bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.812319] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.998706] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.063910] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947101, 'name': PowerOffVM_Task, 'duration_secs': 0.44613} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.064242] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1615.064410] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1615.064656] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3a0057f-cfca-421e-b238-9000decf5082 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.083524] env[62405]: DEBUG oslo_concurrency.lockutils [req-89e76ecf-64aa-4ab2-bdb9-7082f7d3ffdf req-0199b009-6bc9-4701-958c-b971f4501099 service nova] Releasing lock "refresh_cache-0feaeb5d-9f4a-4166-99b1-f213bc4fa458" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.087551] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 59957a81-5297-43d3-a673-024a53a19116 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1615.182790] env[62405]: INFO nova.compute.manager [-] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Took 1.29 seconds to deallocate network for instance. 
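The records above trace the vmwareapi destroy path for instance b3647042-89a1-4d15-b85e-49a5c8def1d4: the VM is powered off, unregistered, its directory is deleted from datastore1 via FileManager.DeleteDatastoreFile_Task, and each vCenter task is polled until completion before the network is deallocated. Below is a minimal sketch of that sequence written against the public oslo.vmware session API rather than Nova's own vm_util/ds_util wrappers; the vCenter host, credentials, retry/poll settings and the Datacenter lookup are illustrative assumptions, not values taken from this log.

# Sketch only: approximates the power-off / unregister / datastore-delete
# sequence logged above using oslo.vmware directly. Host, credentials and
# poll settings are placeholders; Nova drives the same flow through its
# own helpers instead of these raw calls.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# host, username, password, API retry count, task poll interval (seconds)
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'user', 'password', 10, 0.5)

# Locate the VM by instance UUID (SearchIndex.FindAllByUuid, as invoked
# elsewhere in this log).
search_index = session.vim.service_content.searchIndex
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid', search_index,
    uuid='b3647042-89a1-4d15-b85e-49a5c8def1d4',
    vmSearch=True, instanceUuid=True)
vm_ref = vm_refs[0]

# Power off, then block until PowerOffVM_Task finishes; wait_for_task is
# the polling loop that emits the "_poll_task ... progress is N%" records.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

# UnregisterVM is a plain method, not a task, so there is nothing to poll.
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# Delete the instance directory from datastore1 and wait for the task,
# mirroring FileManager.DeleteDatastoreFile_Task in the records above.
dc_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                               'Datacenter', 1)
datacenter_ref = dc_result.objects[0].obj
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] b3647042-89a1-4d15-b85e-49a5c8def1d4',
    datacenter=datacenter_ref)
session.wait_for_task(task)

Each *_Task invocation returns a task reference, and wait_for_task polls it at the configured interval until it reports success or error; those poll cycles are what the repeated "Task: {'id': task-...} progress is 0%" entries in this log correspond to.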
[ 1615.202962] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1615.203994] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1615.203994] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleting the datastore file [datastore1] 0feaeb5d-9f4a-4166-99b1-f213bc4fa458 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1615.203994] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06caef41-e46a-4b39-a6d6-ed9ce8a0a26a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.210933] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1615.210933] env[62405]: value = "task-1947103" [ 1615.210933] env[62405]: _type = "Task" [ 1615.210933] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.219385] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.288084] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d882c-6789-6c01-ebb6-fe6ef92bd1bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.312980] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.498837] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.591068] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1615.688377] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.721253] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.788508] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d882c-6789-6c01-ebb6-fe6ef92bd1bc, 'name': SearchDatastore_Task, 'duration_secs': 1.483774} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.789400] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a77fc8c9-c9c9-4370-ac72-34abcd35abe9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.796442] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1615.796442] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52150649-3cdf-45c9-d0c1-dc4a19ccf36f" [ 1615.796442] env[62405]: _type = "Task" [ 1615.796442] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.807668] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52150649-3cdf-45c9-d0c1-dc4a19ccf36f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.825511] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.945861] env[62405]: DEBUG nova.compute.manager [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Received event network-changed-165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1615.946113] env[62405]: DEBUG nova.compute.manager [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Refreshing instance network info cache due to event network-changed-165104f7-de0a-47de-a4a4-918b51216f4d. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1615.946333] env[62405]: DEBUG oslo_concurrency.lockutils [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] Acquiring lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.946496] env[62405]: DEBUG oslo_concurrency.lockutils [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] Acquired lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.946682] env[62405]: DEBUG nova.network.neutron [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Refreshing network info cache for port 165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1616.001784] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.094163] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a9f83357-4898-44ff-a6d8-ea6621453de9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.221684] env[62405]: DEBUG oslo_vmware.api [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.951991} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.222008] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.222201] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.222608] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1616.222608] env[62405]: INFO nova.compute.manager [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1616.222876] env[62405]: DEBUG oslo.service.loopingcall [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.223098] env[62405]: DEBUG nova.compute.manager [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1616.223217] env[62405]: DEBUG nova.network.neutron [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.307225] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52150649-3cdf-45c9-d0c1-dc4a19ccf36f, 'name': SearchDatastore_Task, 'duration_secs': 0.06052} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.307587] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.307846] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 4d59d9fd-23df-4933-97ed-32602e51e9aa/4d59d9fd-23df-4933-97ed-32602e51e9aa.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1616.310865] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfb99f8e-fae7-4e74-95ae-6240a18fd81b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.320928] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.321190] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.321493] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.321570] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.321740] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.323458] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947099, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.230531} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.325138] env[62405]: INFO nova.compute.manager [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Terminating instance [ 1616.326593] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8/3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1616.326811] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1616.327138] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1616.327138] env[62405]: value = "task-1947104" [ 1616.327138] env[62405]: _type = "Task" [ 1616.327138] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.328184] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e657b83-3da5-4a28-a3ef-e01858c594eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.340216] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.341583] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1616.341583] env[62405]: value = "task-1947105" [ 1616.341583] env[62405]: _type = "Task" [ 1616.341583] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.350592] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.503884] env[62405]: DEBUG oslo_vmware.api [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Task: {'id': task-1947100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.075155} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.505176] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.505399] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.505575] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1616.505769] env[62405]: INFO nova.compute.manager [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Took 3.27 seconds to destroy the instance on the hypervisor. [ 1616.506084] env[62405]: DEBUG oslo.service.loopingcall [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.506786] env[62405]: DEBUG nova.compute.manager [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1616.507100] env[62405]: DEBUG nova.network.neutron [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.527411] env[62405]: DEBUG nova.compute.manager [req-7429c3d5-9d3e-4401-bd99-466d00bb3f42 req-3bb87455-21e1-465e-a406-a5e1d7f2d2f9 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Received event network-vif-deleted-feb1471c-63ad-4e63-bd9d-e413dee50694 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1616.527611] env[62405]: INFO nova.compute.manager [req-7429c3d5-9d3e-4401-bd99-466d00bb3f42 req-3bb87455-21e1-465e-a406-a5e1d7f2d2f9 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Neutron deleted interface feb1471c-63ad-4e63-bd9d-e413dee50694; detaching it from the instance and deleting it from the info cache [ 1616.527780] env[62405]: DEBUG nova.network.neutron [req-7429c3d5-9d3e-4401-bd99-466d00bb3f42 req-3bb87455-21e1-465e-a406-a5e1d7f2d2f9 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.598326] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a6a0e918-425d-44de-a22b-8779e9108533 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.767100] env[62405]: DEBUG nova.network.neutron [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updated VIF entry in instance network info cache for port 165104f7-de0a-47de-a4a4-918b51216f4d. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1616.767358] env[62405]: DEBUG nova.network.neutron [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updating instance_info_cache with network_info: [{"id": "165104f7-de0a-47de-a4a4-918b51216f4d", "address": "fa:16:3e:d3:c8:b8", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165104f7-de", "ovs_interfaceid": "165104f7-de0a-47de-a4a4-918b51216f4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.832106] env[62405]: DEBUG nova.compute.manager [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1616.832506] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.836551] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-798e622f-9e7d-4359-91e7-3a666d7ec489 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.845259] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947104, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.851296] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1616.851296] env[62405]: value = "task-1947106" [ 1616.851296] env[62405]: _type = "Task" [ 1616.851296] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.858571] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.859207] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.860105] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01590305-0f73-40c5-9a21-83fe3386925b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.865958] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.886623] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8/3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.886885] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07639c7c-7701-4d9a-9ea0-55ee6f219b33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.907773] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1616.907773] env[62405]: value = "task-1947107" [ 1616.907773] env[62405]: _type = "Task" [ 1616.907773] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.916106] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947107, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.996469] env[62405]: DEBUG nova.network.neutron [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.032094] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-415e3ae8-8c15-48ad-acf9-1d08e1f4f328 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.042883] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91855170-e89e-4f5c-af30-275165ba8594 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.091837] env[62405]: DEBUG nova.compute.manager [req-7429c3d5-9d3e-4401-bd99-466d00bb3f42 req-3bb87455-21e1-465e-a406-a5e1d7f2d2f9 service nova] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Detach interface failed, port_id=feb1471c-63ad-4e63-bd9d-e413dee50694, reason: Instance 0feaeb5d-9f4a-4166-99b1-f213bc4fa458 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1617.101197] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 6213702e-8e39-4342-b62f-2c9495017bf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1617.270147] env[62405]: DEBUG oslo_concurrency.lockutils [req-936786b8-c19f-493e-9846-c79ee30f80d6 req-7aee8fd6-9fbc-4791-b80b-be0a2ed9728e service nova] Releasing lock "refresh_cache-67bf25ea-5774-4246-a3e6-2aeb0ebf6731" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.295337] env[62405]: DEBUG nova.network.neutron [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.345200] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904683} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.345667] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 4d59d9fd-23df-4933-97ed-32602e51e9aa/4d59d9fd-23df-4933-97ed-32602e51e9aa.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1617.345937] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1617.346234] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-722ab78c-401b-4662-ba01-5465420710c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.356961] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1617.356961] env[62405]: value = "task-1947108" [ 1617.356961] env[62405]: _type = "Task" [ 1617.356961] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.364033] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947106, 'name': PowerOffVM_Task, 'duration_secs': 0.273401} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.364999] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.365168] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1617.365448] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401291', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'name': 'volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3f9849b8-6aaa-4d32-b140-207d5b54d68f', 'attached_at': '', 'detached_at': '', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'serial': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1617.366309] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebf5209-7fb1-423a-b464-6f62bd72b36a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.372249] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947108, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.390204] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d97981-6c71-4ef6-b41e-b99480a7c7f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.398449] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79114490-07ad-421d-abe1-fcb983b28880 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.424290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1054eaf-3778-4374-9d1e-49fd7448c8a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.435663] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947107, 'name': ReconfigVM_Task, 'duration_secs': 0.402367} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.446642] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8/3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1617.447678] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] The volume has not been displaced from its original location: [datastore1] volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35/volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35.vmdk. No consolidation needed. {{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1617.452661] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Reconfiguring VM instance instance-00000010 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1617.452966] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4bfa7904-d2a2-4d8f-a9aa-620de6a05550 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.455090] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9ecd33d-c995-4a28-a643-cd1cd3935cb2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.474428] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1617.474428] env[62405]: value = "task-1947110" [ 1617.474428] env[62405]: _type = "Task" [ 1617.474428] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.475538] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1617.475538] env[62405]: value = "task-1947109" [ 1617.475538] env[62405]: _type = "Task" [ 1617.475538] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.487561] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947109, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.490689] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.500034] env[62405]: INFO nova.compute.manager [-] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Took 1.28 seconds to deallocate network for instance. [ 1617.605125] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 377365a4-7538-4bab-a181-1940e6fb4066 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1617.605601] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1617.606169] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4224MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1617.798362] env[62405]: INFO nova.compute.manager [-] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Took 1.29 seconds to deallocate network for instance. [ 1617.869395] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096784} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.871977] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1617.872995] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe6b718-47da-4b62-a7c2-5bdc8d316903 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.896572] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 4d59d9fd-23df-4933-97ed-32602e51e9aa/4d59d9fd-23df-4933-97ed-32602e51e9aa.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1617.899721] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d66117e-6fd0-4264-a4bb-0ad60ccaff0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.920681] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1617.920681] env[62405]: value = "task-1947111" [ 1617.920681] env[62405]: _type = "Task" [ 1617.920681] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.930414] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.001077] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947109, 'name': Rename_Task, 'duration_secs': 0.206884} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.001405] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947110, 'name': ReconfigVM_Task, 'duration_secs': 0.192745} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.004073] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1618.004383] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Reconfigured VM instance instance-00000010 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1618.011462] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.011462] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-855dc7e3-890b-4a90-94a5-83cda9463125 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.013048] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a4b2c62-615e-4e68-bdd6-733fee736265 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.032657] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1618.032657] env[62405]: value = "task-1947112" [ 1618.032657] env[62405]: _type = "Task" [ 1618.032657] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.034046] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1618.034046] env[62405]: value = "task-1947113" [ 1618.034046] env[62405]: _type = "Task" [ 1618.034046] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.048371] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.052339] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947113, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.142905] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70139600-e49c-465b-8216-2379e01b6f8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.152928] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2677b49-b86f-4025-bf80-f3e7a6aff12e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.183491] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4905bc04-6b2b-45c3-91fc-0d4acb1891ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.192442] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31430149-e1e5-4514-ac05-ea844e420a42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.206831] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1618.307331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.433509] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.548359] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947113, 'name': ReconfigVM_Task, 'duration_secs': 0.408693} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.551667] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401291', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'name': 'volume-09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3f9849b8-6aaa-4d32-b140-207d5b54d68f', 'attached_at': '', 'detached_at': '', 'volume_id': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35', 'serial': '09314eff-d5f0-4a4a-a4b2-f7844bc0cf35'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1618.551971] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1618.552413] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947112, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.553150] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872df30f-dc36-4361-a093-538ccc8a2880 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.560347] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1618.560588] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-921270ee-bd95-4af3-ae6a-c5d15e24185c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.564512] env[62405]: DEBUG nova.compute.manager [req-c5f14be3-f8f5-446e-b7c7-989f78ffeb87 req-dd294796-2635-4fd5-b0d2-93b49b5083fe service nova] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Received event network-vif-deleted-0974798a-a146-421e-a104-caeb56db51b3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1618.644879] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.645138] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 
tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.645389] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Deleting the datastore file [datastore1] 3f9849b8-6aaa-4d32-b140-207d5b54d68f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.645879] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62884162-682a-49a2-aa3e-7852aaa3b1c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.653142] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for the task: (returnval){ [ 1618.653142] env[62405]: value = "task-1947115" [ 1618.653142] env[62405]: _type = "Task" [ 1618.653142] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.661857] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.710288] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1618.932150] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947111, 'name': ReconfigVM_Task, 'duration_secs': 0.656814} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.932430] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 4d59d9fd-23df-4933-97ed-32602e51e9aa/4d59d9fd-23df-4933-97ed-32602e51e9aa.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1618.933074] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e4fb173-224f-40a3-8019-1be720d2d733 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.941120] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1618.941120] env[62405]: value = "task-1947116" [ 1618.941120] env[62405]: _type = "Task" [ 1618.941120] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.949715] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947116, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.045326] env[62405]: DEBUG oslo_vmware.api [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947112, 'name': PowerOnVM_Task, 'duration_secs': 0.734215} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.046027] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1619.046027] env[62405]: INFO nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Took 11.03 seconds to spawn the instance on the hypervisor. 
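The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same invoke-and-poll pattern from oslo.vmware: the driver invokes an asynchronous vSphere *_Task method, gets back a task handle such as task-1947112, then blocks in wait_for_task() while _poll_task logs progress until the task reports "completed successfully". A minimal sketch of that pattern, with a hypothetical vCenter host, credentials and VM managed-object reference:

    # Sketch of the invoke-and-poll pattern seen in the task entries above.
    # The vCenter host, credentials and VM moref are hypothetical placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test',               # hypothetical vCenter host
        'administrator@vsphere.local',   # hypothetical user
        'secret',                        # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Hypothetical managed-object reference for the VM being acted on.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Asynchronous vSphere call; returns a Task object (e.g. "task-1947112").
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls the task (the "_poll_task ... progress is N%" lines above) until
    # it completes successfully or raises on error/timeout.
    session.wait_for_task(task_ref)
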
[ 1619.046027] env[62405]: DEBUG nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1619.046782] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b54af6-63d5-4b55-84c7-24d0d2132a51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.162780] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.216751] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1619.217043] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.199s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.217664] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.041s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.220094] env[62405]: INFO nova.compute.claims [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1619.452844] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947116, 'name': Rename_Task, 'duration_secs': 0.191066} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.453206] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1619.453451] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e113003-9e77-41f5-874a-afb109d6f7e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.461473] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1619.461473] env[62405]: value = "task-1947117" [ 1619.461473] env[62405]: _type = "Task" [ 1619.461473] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.471842] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.564609] env[62405]: INFO nova.compute.manager [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Took 43.79 seconds to build instance. [ 1619.665288] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.972878] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947117, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.069023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0a96eb22-bf73-4e69-8fe3-213f65fb7912 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.009s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.166377] env[62405]: DEBUG oslo_vmware.api [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Task: {'id': task-1947115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.477699} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.166665] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1620.166904] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1620.167113] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1620.167295] env[62405]: INFO nova.compute.manager [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Took 3.34 seconds to destroy the instance on the hypervisor. [ 1620.167862] env[62405]: DEBUG oslo.service.loopingcall [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.168094] env[62405]: DEBUG nova.compute.manager [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1620.168187] env[62405]: DEBUG nova.network.neutron [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1620.476413] env[62405]: DEBUG oslo_vmware.api [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947117, 'name': PowerOnVM_Task, 'duration_secs': 0.609853} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.476792] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1620.477176] env[62405]: INFO nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Took 9.74 seconds to spawn the instance on the hypervisor. 
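The lockutils entries in this stretch ('Acquiring lock "compute_resources" ...', 'Lock "compute_resources" acquired ... waited 32.041s', 'Lock ... "released" ... held 8.199s') come from oslo.concurrency's synchronized wrapper, which Nova applies to resource-tracker and per-instance operations so concurrent requests serialize on a named in-process lock. A minimal sketch of that pattern, with illustrative lock names and an empty function body:

    # Sketch of the named-lock pattern behind the lockutils 'Acquiring lock /
    # acquired ... waited / "released" ... held' entries. Names are illustrative.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the in-process "compute_resources" lock held; the wrapper
        # logs how long the caller waited for the lock and how long it held it.
        pass

    # The same named lock can also be taken explicitly as a context manager,
    # e.g. to serialize work on a single instance UUID.
    with lockutils.lock('3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8'):
        pass
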
[ 1620.477292] env[62405]: DEBUG nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1620.478165] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3efbce-f098-4779-91ca-05941b7f6de1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.513103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.513103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.513225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.513423] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.513627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.517419] env[62405]: INFO nova.compute.manager [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Terminating instance [ 1620.572259] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] 
Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1620.715907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4e0ad0-1dab-4cdb-8ea2-de36bbc98700 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.721982] env[62405]: DEBUG nova.compute.manager [req-86362e3b-d2f8-4f35-abc2-e363e0777f04 req-861ff349-6792-4629-a104-c3f16be60972 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Received event network-vif-deleted-ec10e8ab-572c-4bfa-810d-befff7776996 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1620.722221] env[62405]: INFO nova.compute.manager [req-86362e3b-d2f8-4f35-abc2-e363e0777f04 req-861ff349-6792-4629-a104-c3f16be60972 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Neutron deleted interface ec10e8ab-572c-4bfa-810d-befff7776996; detaching it from the instance and deleting it from the info cache [ 1620.722441] env[62405]: DEBUG nova.network.neutron [req-86362e3b-d2f8-4f35-abc2-e363e0777f04 req-861ff349-6792-4629-a104-c3f16be60972 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.727503] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2cf75c-5ac6-46a9-8f37-6bdbbecb1596 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.762091] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7e2f86-617c-4c4b-8233-a340e945357a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.771230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6040d6f7-3ec9-4ca3-8b39-df58e48349d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.787863] env[62405]: DEBUG nova.compute.provider_tree [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.999802] env[62405]: INFO nova.compute.manager [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Took 43.77 seconds to build instance. [ 1621.024755] env[62405]: DEBUG nova.compute.manager [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1621.025174] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1621.026495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4260e456-657f-4d2a-b227-a0f51b473f4b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.038393] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1621.038760] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-123d3669-29ae-4dd9-b6ba-63aa1f370916 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.046751] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1621.046751] env[62405]: value = "task-1947118" [ 1621.046751] env[62405]: _type = "Task" [ 1621.046751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.058943] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947118, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.095680] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.143310] env[62405]: DEBUG nova.network.neutron [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.229050] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49225351-98e8-47d2-a925-8aa2e36950b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.241416] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc95dd8-e877-4291-b2b9-0f83fbcf0b4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.279825] env[62405]: DEBUG nova.compute.manager [req-86362e3b-d2f8-4f35-abc2-e363e0777f04 req-861ff349-6792-4629-a104-c3f16be60972 service nova] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Detach interface failed, port_id=ec10e8ab-572c-4bfa-810d-befff7776996, reason: Instance 3f9849b8-6aaa-4d32-b140-207d5b54d68f could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1621.291264] env[62405]: DEBUG nova.scheduler.client.report [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1621.345022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.345022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.501749] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e48ad1e2-11b0-4313-b24a-13ae22f6b353 
tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.942s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.556641] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947118, 'name': PowerOffVM_Task, 'duration_secs': 0.229311} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.557051] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1621.557234] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1621.557491] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd29f171-8b87-460e-a889-79d5a7073be8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.649197] env[62405]: INFO nova.compute.manager [-] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Took 1.48 seconds to deallocate network for instance. 
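Note on the lockutils entries above: the pattern is fixed, a lock is requested by name (an instance UUID, "compute_resources", or an image-cache path), the time spent waiting is reported when it is acquired, and the time held is reported when it is released. Below is a minimal standard-library sketch of that pattern; named_lock and _get_named_lock are hypothetical names that only mimic the logged wording, not oslo.concurrency's implementation.

    # Sketch only: reproduces the "Acquiring lock / acquired ... waited Xs /
    # released ... held Ys" messages with plain threading primitives.
    import contextlib
    import threading
    import time

    _locks = {}                      # one named lock per resource
    _locks_guard = threading.Lock()  # protects the registry itself

    def _get_named_lock(name):
        # Create the named lock lazily, mirroring lock-by-name usage in the log.
        with _locks_guard:
            return _locks.setdefault(name, threading.Lock())

    @contextlib.contextmanager
    def named_lock(name, owner):
        lock = _get_named_lock(name)
        print(f'Acquiring lock "{name}" by "{owner}"')
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    if __name__ == "__main__":
        # Example: serialize terminate work per instance UUID, as the
        # do_terminate_instance lock messages above do.
        with named_lock("3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8", "do_terminate_instance"):
            time.sleep(0.1)  # stand-in for the actual work done under the lock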
[ 1621.660215] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1621.660493] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1621.660687] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Deleting the datastore file [datastore1] 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.660950] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1c4520f-ea3e-42b0-a6ba-a6c4c8d19303 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.668425] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for the task: (returnval){ [ 1621.668425] env[62405]: value = "task-1947120" [ 1621.668425] env[62405]: _type = "Task" [ 1621.668425] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.676911] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.796969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.797571] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1621.800323] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.385s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.801720] env[62405]: INFO nova.compute.claims [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1621.847623] env[62405]: DEBUG nova.compute.utils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.005327] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1622.180062] env[62405]: DEBUG oslo_vmware.api [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Task: {'id': task-1947120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141353} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.180352] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1622.180539] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1622.180716] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1622.180886] env[62405]: INFO nova.compute.manager [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Took 1.16 seconds to destroy the instance on the hypervisor. 
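The PowerOffVM_Task, UnregisterVM, and DeleteDatastoreFile_Task entries above show the destroy sequence (power off, unregister, delete the datastore contents, then deallocate the network) and the same wait/poll rhythm each time: a task reference is returned, progress is polled, and the completion line reports duration_secs. A minimal polling loop in that spirit; wait_for_task, get_task_info, and TaskFailed here are hypothetical stand-ins, not the oslo.vmware API.

    # Sketch of the poll-until-terminal pattern seen in the _poll_task lines.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter-style task reference until it reaches a terminal state.

        get_task_info is assumed to return an object with .state ("running",
        "success" or "error"), .progress and .error attributes.
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                duration = time.monotonic() - start
                print(f"Task {task_ref} completed successfully in {duration:.3f}s")
                return info
            if info.state == "error":
                raise TaskFailed(f"Task {task_ref} failed: {info.error}")
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(poll_interval)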
[ 1622.181138] env[62405]: DEBUG oslo.service.loopingcall [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.181328] env[62405]: DEBUG nova.compute.manager [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1622.181423] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1622.209015] env[62405]: INFO nova.compute.manager [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Took 0.56 seconds to detach 1 volumes for instance. [ 1622.211751] env[62405]: DEBUG nova.compute.manager [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Deleting volume: 09314eff-d5f0-4a4a-a4b2-f7844bc0cf35 {{(pid=62405) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1622.309427] env[62405]: DEBUG nova.compute.utils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.311250] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Not allocating networking since 'none' was specified. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1622.351873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.522840] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.760269] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.812387] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1622.925965] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.065070] env[62405]: DEBUG nova.compute.manager [req-639863cb-7e82-498d-8b53-c5a7deb87725 req-fca91f6d-fcf0-41ff-800d-99843acdb089 service nova] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Received event network-vif-deleted-580e1cca-cd84-4ad1-a63a-0ace6a2eb2e6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1623.333973] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b7c74d-21da-4527-990f-084fc2db05d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.343434] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61ccf7f-c881-4745-9e5d-9a18e72e0428 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.373499] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c4b466-dcd7-4ce2-9ec8-ee6ba2be8d4f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.381916] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8897ce5c-e35a-44cc-947c-25b4fbb511ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.395431] env[62405]: DEBUG nova.compute.provider_tree [None 
req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.428327] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.428581] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.428854] env[62405]: INFO nova.compute.manager [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Attaching volume 82db6144-aad1-4cf0-a673-a3b49d728cf6 to /dev/sdb [ 1623.430721] env[62405]: INFO nova.compute.manager [-] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Took 1.25 seconds to deallocate network for instance. [ 1623.462024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a36361d-1f7f-4456-9149-e5ad39067243 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.471803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261210d1-0084-4916-9473-a564b1b79a6e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.490047] env[62405]: DEBUG nova.virt.block_device [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updating existing volume attachment record: 5a18424f-439e-4b48-b07b-90592a1befc2 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1623.824914] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1623.848026] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1623.848220] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1623.848381] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1623.848592] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1623.848751] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1623.848912] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1623.849141] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1623.849317] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1623.849511] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e 
tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1623.849776] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1623.849995] env[62405]: DEBUG nova.virt.hardware [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1623.851065] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5017be4-531b-4efb-9048-2025fbae508c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.859800] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd26d5c-f6f6-48c2-9963-e81f711ba2cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.874698] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1623.880123] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Creating folder: Project (69e8850e05d449b8ad36da8f4ff07380). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.880420] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc70b988-0ed4-4660-b391-1d58f88f0807 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.892331] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Created folder: Project (69e8850e05d449b8ad36da8f4ff07380) in parent group-v401284. [ 1623.892598] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Creating folder: Instances. Parent ref: group-v401411. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.892860] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ca0bfed-84d9-4556-80a0-a409ceb53f6e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.898096] env[62405]: DEBUG nova.scheduler.client.report [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1623.903574] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Created folder: Instances in parent group-v401411. [ 1623.903798] env[62405]: DEBUG oslo.service.loopingcall [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.903981] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1623.904215] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a8fed45-c738-4cf4-9737-5f29bb0da8f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.921363] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1623.921363] env[62405]: value = "task-1947127" [ 1623.921363] env[62405]: _type = "Task" [ 1623.921363] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.928954] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947127, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.938190] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.084315] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.084581] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.403791] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.404361] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1624.407450] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.687s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.407450] env[62405]: DEBUG nova.objects.instance [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1624.432041] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947127, 'name': CreateVM_Task, 'duration_secs': 0.282459} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.432041] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1624.432333] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.432427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.432816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1624.433568] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05056621-31b5-4be0-aef5-1a9f813ff9f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.439201] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1624.439201] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fc687d-3422-5c10-f0c5-67825bdaaf5a" [ 1624.439201] env[62405]: _type = "Task" [ 1624.439201] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.450573] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fc687d-3422-5c10-f0c5-67825bdaaf5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.914544] env[62405]: DEBUG nova.compute.utils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1624.915868] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1624.916040] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1624.950626] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fc687d-3422-5c10-f0c5-67825bdaaf5a, 'name': SearchDatastore_Task, 'duration_secs': 0.010858} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.950925] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.951179] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1624.951413] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.952229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.952229] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1624.952229] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be40c345-dcd4-4c61-880e-d3aaaa053c45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.955765] env[62405]: DEBUG nova.policy [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '760cfbea1e6746c596a8640f13181617', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': 'f304def06be140fcaf0b652727fc95e7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1624.961832] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1624.962033] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1624.962762] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c56689e-6225-4084-8300-9c7926f65554 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.969012] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1624.969012] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52999d28-d316-4fe3-b736-93ee7401b16d" [ 1624.969012] env[62405]: _type = "Task" [ 1624.969012] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.977258] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52999d28-d316-4fe3-b736-93ee7401b16d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.232922] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Successfully created port: 63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1625.417472] env[62405]: DEBUG oslo_concurrency.lockutils [None req-34e409f5-4968-4fce-8e2c-e9aa257078cb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.419518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.399s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.419726] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.422847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.257s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.423047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.425395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.327s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.426809] env[62405]: INFO nova.compute.claims [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1625.430723] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] 
[instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1625.455734] env[62405]: INFO nova.scheduler.client.report [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted allocations for instance ca4d11fe-1d0f-468b-a2f4-21c5b84342ab [ 1625.457299] env[62405]: INFO nova.scheduler.client.report [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Deleted allocations for instance 14dab775-19b4-4d0d-a7ee-67705f7e45ca [ 1625.479146] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52999d28-d316-4fe3-b736-93ee7401b16d, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.480302] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25928a9d-8552-4812-90ef-dc4d77d4c074 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.486954] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1625.486954] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d562a-9705-8c6f-6015-b22da03a8a16" [ 1625.486954] env[62405]: _type = "Task" [ 1625.486954] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.495381] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d562a-9705-8c6f-6015-b22da03a8a16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.972858] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10ac8d9f-6d2c-433c-a3e6-e6ec33a82234 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "ca4d11fe-1d0f-468b-a2f4-21c5b84342ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.273s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.974291] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73596299-093f-48b9-ba92-afb8961301ad tempest-ServerPasswordTestJSON-1820807057 tempest-ServerPasswordTestJSON-1820807057-project-member] Lock "14dab775-19b4-4d0d-a7ee-67705f7e45ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.667s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.998149] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520d562a-9705-8c6f-6015-b22da03a8a16, 'name': SearchDatastore_Task, 'duration_secs': 0.011092} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.998449] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.998758] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.999045] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b39da5b-65f4-4eba-99c5-4cf292a4cfae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.006712] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1626.006712] env[62405]: value = "task-1947129" [ 1626.006712] env[62405]: _type = "Task" [ 1626.006712] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.014973] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947129, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.443637] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1626.478420] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1626.478878] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1626.479188] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1626.479535] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1626.479810] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1626.480098] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1626.480613] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1626.480931] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1626.481336] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1626.481618] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1626.481954] env[62405]: DEBUG nova.virt.hardware [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1626.483771] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6efcb7d-8260-4ff4-a4d4-690fcb5385b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.496244] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5528bb-8f3f-4e0e-9618-a56f85c7cf75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.522668] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448142} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.522976] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.523357] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.523645] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8acf00b4-c3aa-434a-aa21-68dc501567bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.532881] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1626.532881] env[62405]: value = "task-1947130" [ 1626.532881] env[62405]: _type = "Task" [ 1626.532881] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.541912] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947130, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.756995] env[62405]: DEBUG nova.compute.manager [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Received event network-vif-plugged-63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1626.757247] env[62405]: DEBUG oslo_concurrency.lockutils [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] Acquiring lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.757463] env[62405]: DEBUG oslo_concurrency.lockutils [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.757630] env[62405]: DEBUG oslo_concurrency.lockutils [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.757798] env[62405]: DEBUG nova.compute.manager [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] No waiting events found dispatching network-vif-plugged-63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1626.757964] env[62405]: WARNING nova.compute.manager [req-18ed2e3e-b276-4f10-a11a-b7535a93d0d7 req-9e76a931-57d8-40c5-b5af-24fee0b93393 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Received unexpected event network-vif-plugged-63b67ea3-5aa4-459f-b475-23d131034cb5 for instance with vm_state building and task_state spawning. 
[ 1626.962716] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccaa3f20-728e-4917-a283-70b832a7d4b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.972281] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c7d8e0-c833-463f-915b-fcdcf4b40fb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.004830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712b5498-6b1a-4751-9e66-bb894609316e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.013200] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c1c589-87b4-433d-9449-8076021a1cc0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.028623] env[62405]: DEBUG nova.compute.provider_tree [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.044317] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070519} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.045419] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1627.046562] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aec70e-e807-43ab-8d13-cb399bc57fcc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.068596] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1627.069160] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e7f47cb-3f4a-4924-a2ef-6674ea5d875a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.093242] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1627.093242] env[62405]: value = 
"task-1947131" [ 1627.093242] env[62405]: _type = "Task" [ 1627.093242] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.109492] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947131, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.166599] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Successfully updated port: 63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1627.448128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.448128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.448128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.448705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.449073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.452341] env[62405]: INFO nova.compute.manager [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 
tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Terminating instance [ 1627.533977] env[62405]: DEBUG nova.scheduler.client.report [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1627.604892] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947131, 'name': ReconfigVM_Task, 'duration_secs': 0.294571} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.605230] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Reconfigured VM instance instance-00000027 to attach disk [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1627.605873] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b78b8e50-6157-457c-b6ff-55188e4797bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.614457] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1627.614457] env[62405]: value = "task-1947132" [ 1627.614457] env[62405]: _type = "Task" [ 1627.614457] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.627018] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947132, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.669964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.670249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquired lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.670393] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1627.960109] env[62405]: DEBUG nova.compute.manager [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1627.960109] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1627.962052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2fde8f-8938-4071-b39b-4466b42e4e99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.970916] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1627.971227] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6fa3aca5-06c2-4063-a102-6955afb4f705 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.979476] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1627.979476] env[62405]: value = "task-1947133" [ 1627.979476] env[62405]: _type = "Task" [ 1627.979476] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.993432] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947133, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.040339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.040339] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1628.043194] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1628.043765] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401410', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'name': 'volume-82db6144-aad1-4cf0-a673-a3b49d728cf6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '792cd2c8-a67d-4b16-93ab-722fcc8b622d', 'attached_at': '', 'detached_at': '', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'serial': '82db6144-aad1-4cf0-a673-a3b49d728cf6'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1628.043896] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.870s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.044063] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.047013] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.352s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.048439] env[62405]: INFO nova.compute.claims [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.052654] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21831d4-5b75-4433-a283-9611f43991cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.071690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd34390-bcb8-4607-9a64-30af2f9f7102 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.113720] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] volume-82db6144-aad1-4cf0-a673-a3b49d728cf6/volume-82db6144-aad1-4cf0-a673-a3b49d728cf6.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1628.115114] env[62405]: INFO nova.scheduler.client.report [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocations for instance e8ed73c3-fb86-42c3-aae6-b0c8d03149ce [ 1628.119074] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12cc23c9-9c66-4376-a705-5b375256497a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.147687] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.148555] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.151460] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Waiting for the task: (returnval){ [ 1628.151460] env[62405]: value = "task-1947134" [ 
1628.151460] env[62405]: _type = "Task" [ 1628.151460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.157634] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947132, 'name': Rename_Task, 'duration_secs': 0.153913} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.163579] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.163957] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-866665fd-4e7d-4ab3-a763-2c6d69e353a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.196075] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947134, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.197681] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1628.197681] env[62405]: value = "task-1947135" [ 1628.197681] env[62405]: _type = "Task" [ 1628.197681] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.213017] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947135, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.214620] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1628.457809] env[62405]: DEBUG nova.network.neutron [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Updating instance_info_cache with network_info: [{"id": "63b67ea3-5aa4-459f-b475-23d131034cb5", "address": "fa:16:3e:09:90:f4", "network": {"id": "517d015a-98ff-43dc-aeb9-23c5f8353f29", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-403763073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f304def06be140fcaf0b652727fc95e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22b8c642-38ad-4c11-9051-145ab3bc54f2", "external-id": "nsx-vlan-transportzone-247", "segmentation_id": 247, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b67ea3-5a", "ovs_interfaceid": "63b67ea3-5aa4-459f-b475-23d131034cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.490334] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947133, 'name': PowerOffVM_Task, 'duration_secs': 0.306019} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.490604] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.490774] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1628.491042] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de77ed45-00ba-41be-abd8-a60bc35a7de4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.557694] env[62405]: DEBUG nova.compute.utils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.561341] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Not allocating networking since 'none' was specified. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1628.645807] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ce318a9-2d1f-4297-a62d-a8a0f0e887c1 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "e8ed73c3-fb86-42c3-aae6-b0c8d03149ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.668695] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947134, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.713528] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947135, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.806977] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.806977] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.806977] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleting the datastore file [datastore1] b21dc1e7-dacd-4154-9bc3-0fa3774695a8 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.807420] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0707115-130e-4229-8dfe-0ef723b324e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.818902] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1628.818902] env[62405]: value = "task-1947137" [ 1628.818902] env[62405]: _type = "Task" [ 1628.818902] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.831170] env[62405]: DEBUG nova.compute.manager [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Received event network-changed-63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1628.831299] env[62405]: DEBUG nova.compute.manager [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Refreshing instance network info cache due to event network-changed-63b67ea3-5aa4-459f-b475-23d131034cb5. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1628.831495] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] Acquiring lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.839051] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947137, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.960724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Releasing lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.961218] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Instance network_info: |[{"id": "63b67ea3-5aa4-459f-b475-23d131034cb5", "address": "fa:16:3e:09:90:f4", "network": {"id": "517d015a-98ff-43dc-aeb9-23c5f8353f29", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-403763073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f304def06be140fcaf0b652727fc95e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22b8c642-38ad-4c11-9051-145ab3bc54f2", "external-id": "nsx-vlan-transportzone-247", "segmentation_id": 247, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b67ea3-5a", "ovs_interfaceid": "63b67ea3-5aa4-459f-b475-23d131034cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1628.961584] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] Acquired lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.961803] env[62405]: DEBUG nova.network.neutron [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Refreshing network info cache for port 63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.963879] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:90:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '22b8c642-38ad-4c11-9051-145ab3bc54f2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b67ea3-5aa4-459f-b475-23d131034cb5', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.971886] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 
tempest-ImagesNegativeTestJSON-110130304-project-member] Creating folder: Project (f304def06be140fcaf0b652727fc95e7). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.972881] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1fd77790-9349-4efe-8200-00c861e84bd8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.989507] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Created folder: Project (f304def06be140fcaf0b652727fc95e7) in parent group-v401284. [ 1628.989707] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Creating folder: Instances. Parent ref: group-v401414. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.990216] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9408315-1d92-4f47-8797-6a25498d101e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.005687] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Created folder: Instances in parent group-v401414. [ 1629.006020] env[62405]: DEBUG oslo.service.loopingcall [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.006242] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1629.006468] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac0045ac-6503-4ac1-97be-7051d579a486 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.028531] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1629.028531] env[62405]: value = "task-1947140" [ 1629.028531] env[62405]: _type = "Task" [ 1629.028531] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.040576] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947140, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.062176] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1629.169864] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947134, 'name': ReconfigVM_Task, 'duration_secs': 0.706255} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.170183] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] volume-82db6144-aad1-4cf0-a673-a3b49d728cf6/volume-82db6144-aad1-4cf0-a673-a3b49d728cf6.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1629.175312] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed7609cc-58c5-4c32-9dd7-63742cac2231 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.197871] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Waiting for the task: (returnval){ [ 1629.197871] env[62405]: value = "task-1947141" [ 1629.197871] env[62405]: _type = "Task" [ 1629.197871] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.216382] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947141, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.219590] env[62405]: DEBUG oslo_vmware.api [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947135, 'name': PowerOnVM_Task, 'duration_secs': 0.835459} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.223741] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1629.223741] env[62405]: INFO nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Took 5.40 seconds to spawn the instance on the hypervisor. 
[ 1629.223741] env[62405]: DEBUG nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1629.223924] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74b802f-a3d2-492a-99e1-725e50db5d33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.330891] env[62405]: DEBUG oslo_vmware.api [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375249} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.333815] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1629.334039] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1629.334225] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1629.334526] env[62405]: INFO nova.compute.manager [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Took 1.38 seconds to destroy the instance on the hypervisor. [ 1629.334864] env[62405]: DEBUG oslo.service.loopingcall [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.336144] env[62405]: DEBUG nova.compute.manager [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1629.336144] env[62405]: DEBUG nova.network.neutron [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1629.543019] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947140, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.653939] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8e3eeb-153f-472c-b7ba-1e4b29981f5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.663787] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29dd8f7-1dce-4005-b2e4-646444b20cde {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.714170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fb780b-5876-40d9-8181-68eee356276b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.727581] env[62405]: DEBUG oslo_vmware.api [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947141, 'name': ReconfigVM_Task, 'duration_secs': 0.215946} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.728905] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad501e5-c4a8-4bbc-9f30-6fc449b7990d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.733199] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401410', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'name': 'volume-82db6144-aad1-4cf0-a673-a3b49d728cf6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '792cd2c8-a67d-4b16-93ab-722fcc8b622d', 'attached_at': '', 'detached_at': '', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'serial': '82db6144-aad1-4cf0-a673-a3b49d728cf6'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1629.751078] env[62405]: DEBUG nova.compute.provider_tree [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1629.752755] env[62405]: INFO nova.compute.manager [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Took 42.61 seconds to build instance. [ 1629.886195] env[62405]: DEBUG nova.network.neutron [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Updated VIF entry in instance network info cache for port 63b67ea3-5aa4-459f-b475-23d131034cb5. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.886743] env[62405]: DEBUG nova.network.neutron [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Updating instance_info_cache with network_info: [{"id": "63b67ea3-5aa4-459f-b475-23d131034cb5", "address": "fa:16:3e:09:90:f4", "network": {"id": "517d015a-98ff-43dc-aeb9-23c5f8353f29", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-403763073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f304def06be140fcaf0b652727fc95e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "22b8c642-38ad-4c11-9051-145ab3bc54f2", "external-id": "nsx-vlan-transportzone-247", "segmentation_id": 247, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b67ea3-5a", "ovs_interfaceid": "63b67ea3-5aa4-459f-b475-23d131034cb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.048707] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947140, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.075334] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1630.104260] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1630.104548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1630.104711] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1630.104897] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1630.108548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1630.108548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1630.108548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1630.108548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1630.108548] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1630.108803] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1630.108803] env[62405]: DEBUG nova.virt.hardware [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1630.108803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9076790a-7722-4a39-835f-1d79bb87d037 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.116652] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2d6db5-5c8f-4903-b821-da99fafe1b2b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.132490] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1630.138296] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Creating folder: Project (35d6e16e9aae4df1af1fd3d6a3166b1b). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1630.138900] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-420f42d6-0569-4622-a256-bb082413a9eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.151950] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Created folder: Project (35d6e16e9aae4df1af1fd3d6a3166b1b) in parent group-v401284. [ 1630.152608] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Creating folder: Instances. Parent ref: group-v401417. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1630.152608] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16acdff5-0738-4cc6-97a6-01c42b77dc4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.165296] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Created folder: Instances in parent group-v401417. 
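Entries such as 'Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim"' and the matching 'acquired ... waited' / 'released ... held' lines, as well as the 'Acquiring lock "[datastore1] devstack-image-cache_base/..."' / 'Releasing lock' pairs around the image cache, are emitted by oslo.concurrency's lockutils. A minimal sketch of the two call forms that produce these DEBUG lines; the lock name and the guarded callable below are illustrative only, not taken from this trace:

    from oslo_concurrency import lockutils

    # Context-manager form: logs "Acquiring lock ..." / "Acquired lock ..." /
    # "Releasing lock ..." (lockutils.py:310/313/331 in the entries above).
    with lockutils.lock('compute_resources'):
        claim_resources()  # hypothetical callable guarded by the lock

    # Decorator form: logs 'Lock "..." acquired by "..." :: waited N s' and
    # 'Lock "..." "released" by "..." :: held N s' (lockutils.py:402/407/421).
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        ...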
[ 1630.165443] env[62405]: DEBUG oslo.service.loopingcall [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.165742] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1630.165871] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9efa1f8-3a33-413b-bea1-40167d784f33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.190145] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1630.190145] env[62405]: value = "task-1947144" [ 1630.190145] env[62405]: _type = "Task" [ 1630.190145] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.198559] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947144, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.255024] env[62405]: DEBUG nova.scheduler.client.report [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1630.259223] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d13a535-5f61-4ffa-b78f-a8c9ac45767e tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.833s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.391316] env[62405]: DEBUG oslo_concurrency.lockutils [req-0d21f95a-299c-4521-892b-adf2c0124581 req-170962ff-59c6-4d0d-955b-e0585c007ac0 service nova] Releasing lock "refresh_cache-8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.460059] env[62405]: DEBUG nova.network.neutron [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.541085] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947140, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.543894] env[62405]: INFO nova.compute.manager [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Rebuilding instance [ 1630.595181] env[62405]: DEBUG nova.compute.manager [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1630.595770] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d85b97-0151-4cc1-baae-442fbf42d2ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.703741] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947144, 'name': CreateVM_Task, 'duration_secs': 0.300521} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.704412] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1630.704996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.705268] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.705789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1630.706141] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b29f455-38fa-42cc-97c0-8e985c1d243d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.713082] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1630.713082] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247e4f3-3620-aab6-b0fb-361b16c06b3f" [ 1630.713082] env[62405]: _type = "Task" [ 1630.713082] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.725330] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247e4f3-3620-aab6-b0fb-361b16c06b3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.760640] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.761215] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1630.763887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.484s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.765310] env[62405]: INFO nova.compute.claims [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1630.768051] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1630.788166] env[62405]: DEBUG nova.objects.instance [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lazy-loading 'flavor' on Instance uuid 792cd2c8-a67d-4b16-93ab-722fcc8b622d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1630.858314] env[62405]: DEBUG nova.compute.manager [req-3c9c29cb-d52c-4e5c-94bf-78bcab675f22 req-30b3abbf-5c81-4b2c-8a58-5db6628d31ba service nova] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Received event network-vif-deleted-e3b36820-3fc9-4b42-820d-9018b302c322 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1630.962637] env[62405]: INFO nova.compute.manager [-] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Took 1.63 seconds to deallocate network for instance. [ 1631.042725] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947140, 'name': CreateVM_Task, 'duration_secs': 1.575963} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.042909] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1631.043577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.224932] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5247e4f3-3620-aab6-b0fb-361b16c06b3f, 'name': SearchDatastore_Task, 'duration_secs': 0.011488} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.225769] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.225769] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1631.225911] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.226084] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.226195] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1631.226456] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.226765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1631.226996] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5f6e2fc-4b55-4761-a251-41b2717c4a90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.228866] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a13dd8f4-98b2-490d-8e9c-fcd5d5bdeb73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.236952] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1631.236952] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52959f80-d800-5ffd-5db4-0c770ca2d00f" [ 1631.236952] env[62405]: _type = "Task" [ 1631.236952] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.242139] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1631.242139] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1631.242643] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cca4b1c-b36a-43ce-9ed7-0596b0df377c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.248625] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52959f80-d800-5ffd-5db4-0c770ca2d00f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.251788] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1631.251788] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5291b036-3ef5-b409-0d0a-cfe44df6ba76" [ 1631.251788] env[62405]: _type = "Task" [ 1631.251788] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.259811] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5291b036-3ef5-b409-0d0a-cfe44df6ba76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.269186] env[62405]: DEBUG nova.compute.utils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1631.274145] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Not allocating networking since 'none' was specified. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1631.292404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cc6533f2-a7f0-46b0-bd67-280f9c9edca2 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.863s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.311541] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.469743] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.610927] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1631.610927] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16db8636-810d-4e1b-9b02-53d26bf585bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.621335] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1631.621335] env[62405]: value = "task-1947145" [ 1631.621335] env[62405]: _type = "Task" [ 1631.621335] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.631815] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.752785] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52959f80-d800-5ffd-5db4-0c770ca2d00f, 'name': SearchDatastore_Task, 'duration_secs': 0.020234} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.756655] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.756981] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1631.757219] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.763830] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5291b036-3ef5-b409-0d0a-cfe44df6ba76, 'name': SearchDatastore_Task, 'duration_secs': 0.012004} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.764658] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d38862b3-c580-4cb6-82c0-b654343c778d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.770751] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1631.770751] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520e1fc7-bff2-b591-474c-1f6b83276e40" [ 1631.770751] env[62405]: _type = "Task" [ 1631.770751] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.773375] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1631.783803] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520e1fc7-bff2-b591-474c-1f6b83276e40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.131559] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947145, 'name': PowerOffVM_Task, 'duration_secs': 0.190995} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.133840] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1632.134089] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1632.135192] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc240231-dba1-4dc8-a425-582d7e71b06e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.142888] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1632.145250] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecce302f-4e8b-46c7-ba5a-3ff8879c5c54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.180143] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1632.180143] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Deleting contents of the VM from datastore datastore1 {{(pid=62405) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1632.180143] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Deleting the datastore file [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1632.180274] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-275c7555-7754-466f-b4ca-55c495929eca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.187875] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1632.187875] env[62405]: value = "task-1947147" [ 1632.187875] env[62405]: _type = "Task" [ 1632.187875] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.200135] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947147, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.233236] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0666e0bf-defa-4aa6-b38e-a424e8cae054 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.241380] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a28c065-d507-4da8-9f75-920f118b8df4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.274874] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de306c0-023b-4028-973e-3704e17191c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.287317] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520e1fc7-bff2-b591-474c-1f6b83276e40, 'name': SearchDatastore_Task, 'duration_secs': 0.013044} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.289547] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.289783] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73/0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1632.290126] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.290314] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1632.290526] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f1373ea-ed29-481b-a097-bb4c6b10b5b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.293629] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c4ffef-a9f8-4074-ae40-dd133e8aa240 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.297653] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b629a32-55ed-4741-9bb2-3858d2f8bd60 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.310681] env[62405]: DEBUG nova.compute.provider_tree [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.314497] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1632.314705] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 
tempest-ImagesNegativeTestJSON-110130304-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1632.315774] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1632.315774] env[62405]: value = "task-1947148" [ 1632.315774] env[62405]: _type = "Task" [ 1632.315774] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.316171] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe41de7b-5e1f-46ce-a821-7aa81fd3abe0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.328770] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947148, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.329815] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1632.329815] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d3baeb-64a9-04b9-cac1-1c10b8092e2c" [ 1632.329815] env[62405]: _type = "Task" [ 1632.329815] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.343016] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d3baeb-64a9-04b9-cac1-1c10b8092e2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009892} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.343016] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8605e07-f7dc-4e7e-9dd8-905d5718139a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.346592] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1632.346592] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e28173-34bb-d8ae-f3b3-6e50c4e7f73b" [ 1632.346592] env[62405]: _type = "Task" [ 1632.346592] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.357134] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e28173-34bb-d8ae-f3b3-6e50c4e7f73b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.377185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.377568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.699145] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094231} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.699528] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1632.699677] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1632.699855] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1632.783336] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1632.810686] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1632.810941] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1632.811100] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1632.811282] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1632.811430] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1632.811572] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1632.811778] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1632.811952] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1632.812128] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1632.812347] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1632.812591] env[62405]: DEBUG nova.virt.hardware [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1632.813462] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d7159f-8f75-4c6e-885d-3455ff0028fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.816903] env[62405]: DEBUG nova.scheduler.client.report [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1632.828301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2411269-4342-4fa1-9c7f-d149b2f77119 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.834911] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947148, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460734} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.836033] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73/0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1632.836265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1632.836455] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b51aa7c-4f29-4166-956c-f8b3e3bacfaf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.845968] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.851409] env[62405]: DEBUG oslo.service.loopingcall [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.852241] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.855177] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e04f149-2d49-4ab9-ac1b-1e35c87e419e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.872577] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1632.872577] env[62405]: value = "task-1947149" [ 1632.872577] env[62405]: _type = "Task" [ 1632.872577] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.880475] env[62405]: INFO nova.compute.manager [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Detaching volume 82db6144-aad1-4cf0-a673-a3b49d728cf6 [ 1632.883056] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e28173-34bb-d8ae-f3b3-6e50c4e7f73b, 'name': SearchDatastore_Task, 'duration_secs': 0.009112} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.885451] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.885649] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6/8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1632.888862] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.888862] env[62405]: value = "task-1947150" [ 1632.888862] env[62405]: _type = "Task" [ 1632.888862] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.888862] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06150478-9b2b-4d9e-9a40-03d832efba06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.896170] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947149, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.902862] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947150, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.904222] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1632.904222] env[62405]: value = "task-1947151" [ 1632.904222] env[62405]: _type = "Task" [ 1632.904222] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.913128] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947151, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.930691] env[62405]: INFO nova.virt.block_device [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Attempting to driver detach volume 82db6144-aad1-4cf0-a673-a3b49d728cf6 from mountpoint /dev/sdb [ 1632.930942] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1632.931136] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401410', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'name': 'volume-82db6144-aad1-4cf0-a673-a3b49d728cf6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '792cd2c8-a67d-4b16-93ab-722fcc8b622d', 'attached_at': '', 'detached_at': '', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'serial': '82db6144-aad1-4cf0-a673-a3b49d728cf6'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1632.932053] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea53e400-3f41-47dd-ae56-9d007b92bb4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.954818] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027d2fa5-d035-49a3-b9e2-c06146767299 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.967091] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3d8d65-44b8-4ea6-93cb-c5174fa6484a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.988187] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecd2270-eb7a-4c20-834d-c0d99311a2c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.003670] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] The volume has not been displaced from its original location: [datastore1] 
volume-82db6144-aad1-4cf0-a673-a3b49d728cf6/volume-82db6144-aad1-4cf0-a673-a3b49d728cf6.vmdk. No consolidation needed. {{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1633.008818] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfiguring VM instance instance-00000011 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1633.010030] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d80b83a-89da-4660-9d85-bb97e69492df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.028806] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Waiting for the task: (returnval){ [ 1633.028806] env[62405]: value = "task-1947152" [ 1633.028806] env[62405]: _type = "Task" [ 1633.028806] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.038686] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.323057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.323644] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1633.326639] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.749s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.326816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.329505] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.577s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.329746] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.331840] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.582s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.333433] env[62405]: INFO nova.compute.claims [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.361494] env[62405]: INFO nova.scheduler.client.report [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Deleted allocations for instance fbedaa93-5968-4b42-b93e-201d2b44b32b [ 1633.365338] env[62405]: INFO nova.scheduler.client.report [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance 900b95b5-fe5a-46c1-909a-f81b82ced0ef [ 1633.386499] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947149, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068954} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.386953] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1633.388106] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396f4608-a620-4c78-bdc7-15e7de1b5385 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.413043] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73/0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1633.416852] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3069756d-dacb-4077-8c37-b90ca7bb9e66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.431121] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947150, 'name': CreateVM_Task, 'duration_secs': 0.430331} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.434958] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1633.436050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.436223] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.436644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1633.437307] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97fcfa94-a17d-49d0-8bad-4a0837b77fb7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.442207] env[62405]: DEBUG oslo_vmware.api [None 
req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483918} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.443725] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6/8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1633.443953] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1633.444263] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1633.444263] env[62405]: value = "task-1947153" [ 1633.444263] env[62405]: _type = "Task" [ 1633.444263] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.444472] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1846157d-482d-4695-9396-70540299d1fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.452576] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1633.452576] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5262e2be-308a-2c7f-6856-5eaf4fe78738" [ 1633.452576] env[62405]: _type = "Task" [ 1633.452576] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.459365] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947153, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.460909] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1633.460909] env[62405]: value = "task-1947154" [ 1633.460909] env[62405]: _type = "Task" [ 1633.460909] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.466661] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5262e2be-308a-2c7f-6856-5eaf4fe78738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.471443] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947154, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.539512] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947152, 'name': ReconfigVM_Task, 'duration_secs': 0.291582} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.539836] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Reconfigured VM instance instance-00000011 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1633.544540] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b990b2b-39e4-44c6-8faf-c3e8a8ec5059 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.561509] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Waiting for the task: (returnval){ [ 1633.561509] env[62405]: value = "task-1947155" [ 1633.561509] env[62405]: _type = "Task" [ 1633.561509] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.570805] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947155, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.732151] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1633.732484] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1633.732579] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1633.732761] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1633.732907] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1633.733082] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1633.733321] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1633.733487] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1633.733654] env[62405]: DEBUG nova.virt.hardware [None 
req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1633.733813] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1633.733988] env[62405]: DEBUG nova.virt.hardware [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1633.734916] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8699561b-7dd7-40d2-9cb2-260e6d907923 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.743871] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1799431-c45e-4227-887f-cffdaa93e91e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.758250] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1633.764055] env[62405]: DEBUG oslo.service.loopingcall [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1633.764374] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1633.764626] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0876fa3d-7141-49dd-8431-c6a1029e8925 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.783428] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1633.783428] env[62405]: value = "task-1947156" [ 1633.783428] env[62405]: _type = "Task" [ 1633.783428] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.793047] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947156, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.839262] env[62405]: DEBUG nova.compute.utils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1633.843346] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1633.843543] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1633.877400] env[62405]: DEBUG oslo_concurrency.lockutils [None req-282b6a3e-f45f-4a50-9f6c-20b5884135cb tempest-ImagesOneServerTestJSON-810073092 tempest-ImagesOneServerTestJSON-810073092-project-member] Lock "fbedaa93-5968-4b42-b93e-201d2b44b32b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 32.860s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.879340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18147140-f297-4e54-acf8-9815c9cafa0e tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "900b95b5-fe5a-46c1-909a-f81b82ced0ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.509s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.911593] env[62405]: DEBUG nova.policy [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87eed93e34114804b17db4ceaf3eeea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56442ca63108497d97070d582050f97b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1633.957384] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.970762] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5262e2be-308a-2c7f-6856-5eaf4fe78738, 'name': SearchDatastore_Task, 'duration_secs': 0.028407} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.971583] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.971846] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.972111] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.973044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.973044] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.973044] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ab91169-93af-48eb-a709-a54a181d816a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.980823] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219969} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.981433] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1633.982335] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627319d1-e938-48bc-b792-aae3bf12d162 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.013296] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6/8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1634.014818] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6bfc67b-d919-4425-bc5a-26f52d86e41d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.032810] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1634.032810] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1634.033688] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c68bb3-77b2-4baf-a456-fbe6d186717f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.751288] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Successfully created port: e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1634.755287] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1634.779545] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1634.779545] env[62405]: value = "task-1947157" [ 1634.779545] env[62405]: _type = "Task" [ 1634.779545] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.802039] env[62405]: DEBUG oslo_vmware.api [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Task: {'id': task-1947155, 'name': ReconfigVM_Task, 'duration_secs': 0.845371} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.802039] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947156, 'name': CreateVM_Task, 'duration_secs': 0.336824} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.802039] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947153, 'name': ReconfigVM_Task, 'duration_secs': 0.708045} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.802039] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1634.802039] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bdf3b0-eddc-33ec-0843-7ceb492b3fdd" [ 1634.802039] env[62405]: _type = "Task" [ 1634.802039] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.806117] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401410', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'name': 'volume-82db6144-aad1-4cf0-a673-a3b49d728cf6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '792cd2c8-a67d-4b16-93ab-722fcc8b622d', 'attached_at': '', 'detached_at': '', 'volume_id': '82db6144-aad1-4cf0-a673-a3b49d728cf6', 'serial': '82db6144-aad1-4cf0-a673-a3b49d728cf6'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1634.808171] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1634.808470] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73/0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1634.811036] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.811189] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.811505] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1634.811914] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53862c3d-423e-4e61-9a11-fe110bb4a3b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.817156] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e546cb58-eb06-4306-b061-faac313c7f9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.823036] env[62405]: DEBUG oslo_vmware.api [None 
req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947157, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.831246] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bdf3b0-eddc-33ec-0843-7ceb492b3fdd, 'name': SearchDatastore_Task, 'duration_secs': 0.011238} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.835411] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1634.835411] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b9a2d-d1af-13f8-b637-a4eacfec6423" [ 1634.835411] env[62405]: _type = "Task" [ 1634.835411] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.835826] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1634.835826] env[62405]: value = "task-1947158" [ 1634.835826] env[62405]: _type = "Task" [ 1634.835826] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.836105] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31f127dc-be6a-4f9d-b94d-d1f58c130307 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.850379] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947158, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.854961] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1634.854961] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eff050-c399-6aff-1c65-68cf4ce99fe3" [ 1634.854961] env[62405]: _type = "Task" [ 1634.854961] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.855239] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b9a2d-d1af-13f8-b637-a4eacfec6423, 'name': SearchDatastore_Task, 'duration_secs': 0.011848} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.856040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.856229] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1634.856510] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1634.871560] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eff050-c399-6aff-1c65-68cf4ce99fe3, 'name': SearchDatastore_Task, 'duration_secs': 0.008699} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.871909] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.872249] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.873090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.873355] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1634.873648] 
env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba1ce1dd-4375-4ff4-add2-4a88f3f34fe6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.878766] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d491faa8-03d0-4141-b3ba-7a92773893dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.889025] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1634.889025] env[62405]: value = "task-1947159" [ 1634.889025] env[62405]: _type = "Task" [ 1634.889025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.890355] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1634.890529] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1634.894453] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbbfa988-bc30-4099-b879-5a7fef6efc13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.905863] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.908432] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1634.908432] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee76d-b19d-82a2-4020-0f649d09af5f" [ 1634.908432] env[62405]: _type = "Task" [ 1634.908432] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.917318] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee76d-b19d-82a2-4020-0f649d09af5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.296544] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947157, 'name': ReconfigVM_Task, 'duration_secs': 0.308153} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.298513] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6/8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1635.298513] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a918608-9ddf-4f27-97e6-1a9ad5e0fbfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.310569] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1635.310569] env[62405]: value = "task-1947160" [ 1635.310569] env[62405]: _type = "Task" [ 1635.310569] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.323716] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947160, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.354244] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947158, 'name': Rename_Task, 'duration_secs': 0.171314} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.355222] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1635.355222] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e13e2be-8d97-42cd-b790-3dc137203d12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.366498] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1635.366498] env[62405]: value = "task-1947161" [ 1635.366498] env[62405]: _type = "Task" [ 1635.366498] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.381922] env[62405]: DEBUG nova.objects.instance [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lazy-loading 'flavor' on Instance uuid 792cd2c8-a67d-4b16-93ab-722fcc8b622d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.384401] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.397987] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127d26ad-482f-4ccf-8e42-e77f1225a94b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.408432] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488266} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.414417] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.414766] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.415346] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a2e6c50-0d75-4544-98e9-86d301c9bfd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.418325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0dd6ea-8db7-422d-8f62-f75dcc6d3e0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.456256] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f67c6b-5c2e-4dc9-b19d-7d86aed42e66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.459229] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521ee76d-b19d-82a2-4020-0f649d09af5f, 'name': SearchDatastore_Task, 'duration_secs': 0.012556} completed 
successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.459566] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1635.459566] env[62405]: value = "task-1947162" [ 1635.459566] env[62405]: _type = "Task" [ 1635.459566] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.461032] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ac5db62-fead-4db5-a3a3-8ad191dc4a25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.470676] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2276070e-600e-4abb-ac3c-084f7750fc39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.484869] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947162, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.484869] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1635.484869] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522abaf2-618c-69a2-036a-60ca02ec2310" [ 1635.484869] env[62405]: _type = "Task" [ 1635.484869] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.497758] env[62405]: DEBUG nova.compute.provider_tree [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.506752] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522abaf2-618c-69a2-036a-60ca02ec2310, 'name': SearchDatastore_Task, 'duration_secs': 0.011056} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.507064] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.507420] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1635.507766] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25f3b1a7-1e72-4709-a10d-66e052083584 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.516916] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1635.516916] env[62405]: value = "task-1947163" [ 1635.516916] env[62405]: _type = "Task" [ 1635.516916] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.526734] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947163, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.777030] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1635.811891] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1635.812170] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1635.812338] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1635.812520] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1635.812747] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1635.812811] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1635.813027] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1635.815925] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1635.815925] env[62405]: DEBUG 
nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1635.815925] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1635.815925] env[62405]: DEBUG nova.virt.hardware [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1635.817767] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f89350-1209-4019-b475-d459fa5c903f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.827484] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947160, 'name': Rename_Task, 'duration_secs': 0.178323} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.830014] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1635.830363] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1b3b0c6-35ae-413a-a59d-809ac5a5ed5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.833000] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2144c4-4490-422e-8452-44f3ce402cb2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.851982] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1635.851982] env[62405]: value = "task-1947164" [ 1635.851982] env[62405]: _type = "Task" [ 1635.851982] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.862860] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.878499] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947161, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.975032] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947162, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086227} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.975032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.975919] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28d45fa-84fd-4c13-9968-2460876b50a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.998817] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.999358] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3e1f8ad-673c-4e8e-80eb-35b01283847b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.014154] env[62405]: DEBUG nova.scheduler.client.report [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1636.028031] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1636.028031] env[62405]: value = "task-1947165" [ 1636.028031] env[62405]: _type = "Task" [ 1636.028031] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.029545] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947163, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.040921] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947165, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.059565] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.062019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.325733] env[62405]: DEBUG nova.compute.manager [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Received event network-vif-plugged-e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1636.325944] env[62405]: DEBUG oslo_concurrency.lockutils [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] Acquiring lock "c392d6f3-b638-4857-826d-760c38b7d291-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.326178] env[62405]: DEBUG oslo_concurrency.lockutils [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] Lock "c392d6f3-b638-4857-826d-760c38b7d291-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.326350] env[62405]: DEBUG oslo_concurrency.lockutils [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] Lock "c392d6f3-b638-4857-826d-760c38b7d291-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.326519] env[62405]: DEBUG nova.compute.manager [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] No waiting 
events found dispatching network-vif-plugged-e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1636.326685] env[62405]: WARNING nova.compute.manager [req-468acb56-4012-4664-a2b5-c257dc1da398 req-2b889f72-3f1d-4aec-8dab-cce87eee62a1 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Received unexpected event network-vif-plugged-e766daac-fbcb-489e-aef5-d97530246eb0 for instance with vm_state building and task_state spawning. [ 1636.363097] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947164, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.377608] env[62405]: DEBUG oslo_vmware.api [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947161, 'name': PowerOnVM_Task, 'duration_secs': 0.55116} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.377874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1636.378193] env[62405]: INFO nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Took 6.30 seconds to spawn the instance on the hypervisor. 
[ 1636.378399] env[62405]: DEBUG nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1636.379177] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0a0114-02c9-4df1-ae4d-a352eefff067 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.392481] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2a438328-6630-45e8-987f-c29ed9c649a1 tempest-VolumesAssistedSnapshotsTest-393588668 tempest-VolumesAssistedSnapshotsTest-393588668-project-admin] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.013s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.424604] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Successfully updated port: e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1636.520096] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.188s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.521110] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1636.524728] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.609s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.525071] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.527492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.346s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.527686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.529472] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.699s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.530884] env[62405]: INFO nova.compute.claims [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1636.546917] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947165, 'name': ReconfigVM_Task, 'duration_secs': 0.278591} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.550996] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.552169] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947163, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520485} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.552169] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-558020ee-426f-4100-9cb3-6814ea67ea6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.553291] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1636.553499] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1636.553751] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be40e062-1d5d-49f5-ac3e-ec5a9e0b8f34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.562724] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1636.562724] env[62405]: value = "task-1947167" [ 1636.562724] env[62405]: _type = "Task" [ 1636.562724] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.564631] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1636.564631] env[62405]: value = "task-1947166" [ 1636.564631] env[62405]: _type = "Task" [ 1636.564631] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.570307] env[62405]: INFO nova.scheduler.client.report [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Deleted allocations for instance a73579d1-8647-49fe-98ce-0baffd1a558f [ 1636.575848] env[62405]: INFO nova.scheduler.client.report [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Deleted allocations for instance 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b [ 1636.589041] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.592646] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947166, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.865274] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947164, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.899306] env[62405]: INFO nova.compute.manager [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Took 42.82 seconds to build instance. 
[ 1636.927673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.928345] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquired lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.928599] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1637.041471] env[62405]: DEBUG nova.compute.utils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1637.041471] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1637.041471] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1637.085920] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162878} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.090189] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947166, 'name': Rename_Task, 'duration_secs': 0.148759} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.090638] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cde06c7d-9666-40ff-a9a7-26f752146d74 tempest-ServerDiagnosticsTest-589347912 tempest-ServerDiagnosticsTest-589347912-project-member] Lock "9e73e2ab-1eac-4aca-905f-a8391d3f5a9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.483s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.091517] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1637.095022] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1637.095022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b1bf8a2f-d9f3-4a2e-bf6f-b6bc42142216 tempest-ServersAaction247Test-1478642496 tempest-ServersAaction247Test-1478642496-project-member] Lock "a73579d1-8647-49fe-98ce-0baffd1a558f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.690s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.095022] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff74c58-4b6f-4564-b7bf-c7eaf6a05585 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.096537] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-289b7e99-1a56-4669-bdc0-50ef1cd80109 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.118221] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1637.118221] env[62405]: value = "task-1947169" [ 1637.118221] env[62405]: _type = "Task" [ 1637.118221] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.127175] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1637.128632] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9e9d2f0-4ba4-44c5-bbe8-fd5c041ec5a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.145412] env[62405]: DEBUG nova.policy [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6472af0b6f6240f297f7f137cde41929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb1da47e8b1a400fab7817d9e6b282ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1637.157068] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947169, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.158944] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1637.158944] env[62405]: value = "task-1947170" [ 1637.158944] env[62405]: _type = "Task" [ 1637.158944] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.169221] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947170, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.366243] env[62405]: DEBUG oslo_vmware.api [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947164, 'name': PowerOnVM_Task, 'duration_secs': 1.257229} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.366778] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1637.367179] env[62405]: INFO nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Took 10.92 seconds to spawn the instance on the hypervisor. [ 1637.367463] env[62405]: DEBUG nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1637.368470] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d13fd9-53da-43ef-87f5-f83f58b71bc8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.402090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b644e2f9-cda9-46a3-bf08-3bcc97fc1d90 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.694s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.487138] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1637.546067] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1637.641928] env[62405]: DEBUG oslo_vmware.api [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947169, 'name': PowerOnVM_Task, 'duration_secs': 0.464394} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.642302] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1637.642661] env[62405]: INFO nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Took 4.86 seconds to spawn the instance on the hypervisor. [ 1637.646021] env[62405]: DEBUG nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1637.646021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4b85c3-10a8-4869-a5bb-81d3c1340ad5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.672677] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947170, 'name': ReconfigVM_Task, 'duration_secs': 0.323813} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.673221] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Reconfigured VM instance instance-00000027 to attach disk [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae/d5686d7c-a73f-4e02-8726-eab8221a0eae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1637.674083] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-baef3329-b596-4cc3-b831-a0a385eb15f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.686111] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1637.686111] env[62405]: value = "task-1947171" [ 1637.686111] env[62405]: _type = "Task" [ 1637.686111] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.687412] env[62405]: DEBUG nova.network.neutron [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Updating instance_info_cache with network_info: [{"id": "e766daac-fbcb-489e-aef5-d97530246eb0", "address": "fa:16:3e:bf:a1:1e", "network": {"id": "4a1adf8e-9b11-47cf-a09e-910b0fd2b5ed", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2001588294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56442ca63108497d97070d582050f97b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape766daac-fb", "ovs_interfaceid": "e766daac-fbcb-489e-aef5-d97530246eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.696184] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947171, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.765794] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Successfully created port: 740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.891604] env[62405]: INFO nova.compute.manager [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Took 48.51 seconds to build instance. [ 1637.907592] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1638.166790] env[62405]: INFO nova.compute.manager [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Took 34.50 seconds to build instance. 
[ 1638.184461] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a09aa7-fcc6-4b08-9e5d-f764138c026d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.193816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Releasing lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.193816] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Instance network_info: |[{"id": "e766daac-fbcb-489e-aef5-d97530246eb0", "address": "fa:16:3e:bf:a1:1e", "network": {"id": "4a1adf8e-9b11-47cf-a09e-910b0fd2b5ed", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2001588294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56442ca63108497d97070d582050f97b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape766daac-fb", "ovs_interfaceid": "e766daac-fbcb-489e-aef5-d97530246eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1638.194564] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:a1:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e766daac-fbcb-489e-aef5-d97530246eb0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1638.203911] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Creating folder: Project (56442ca63108497d97070d582050f97b). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1638.205577] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7293c97-5127-4ddd-8368-590f6444d3f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.211542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a92b306-330d-4de1-8a9f-e5faf391ede9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.215191] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947171, 'name': Rename_Task, 'duration_secs': 0.244816} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.216179] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1638.216679] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb6b9a18-6759-4478-8848-325d1b54805e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.246920] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9258e480-4a31-46bf-99b7-0d35547c77cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.250263] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1638.250263] env[62405]: value = "task-1947173" [ 1638.250263] env[62405]: _type = "Task" [ 1638.250263] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.250637] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Created folder: Project (56442ca63108497d97070d582050f97b) in parent group-v401284. [ 1638.250839] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Creating folder: Instances. Parent ref: group-v401422. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1638.251572] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce474fff-059c-4afc-848e-ac32a7ae05f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.260232] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8823814b-595e-4eda-bd3c-689079c60bc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.268157] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947173, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.270510] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Created folder: Instances in parent group-v401422. [ 1638.270858] env[62405]: DEBUG oslo.service.loopingcall [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.271084] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1638.271304] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-019cf777-ec6e-424e-9ca1-3a58d0e4fba5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.295598] env[62405]: DEBUG nova.compute.provider_tree [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1638.303571] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1638.303571] env[62405]: value = "task-1947175" [ 1638.303571] env[62405]: _type = "Task" [ 1638.303571] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.315639] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947175, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.395753] env[62405]: DEBUG oslo_concurrency.lockutils [None req-79cde477-0a5f-4f3d-a705-54b2c3e03776 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.897s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.436427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.499266] env[62405]: DEBUG nova.compute.manager [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Received event network-changed-e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1638.499266] env[62405]: DEBUG nova.compute.manager [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Refreshing instance network info cache due to event network-changed-e766daac-fbcb-489e-aef5-d97530246eb0. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1638.499266] env[62405]: DEBUG oslo_concurrency.lockutils [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] Acquiring lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.499266] env[62405]: DEBUG oslo_concurrency.lockutils [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] Acquired lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.499266] env[62405]: DEBUG nova.network.neutron [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Refreshing network info cache for port e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1638.561718] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1638.591839] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1638.591839] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.591839] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1638.592397] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.592740] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1638.597029] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1638.597029] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1638.597029] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1638.597029] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 
tempest-ImagesTestJSON-1176465240-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1638.597029] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1638.597228] env[62405]: DEBUG nova.virt.hardware [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1638.597228] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87125fff-745a-45b9-a4c1-c249c3ca819c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.606145] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17650bb2-e76c-4704-be7c-4ebff1b4778f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.668919] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22b89288-dc0d-4ebf-b710-e994341eb245 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.855s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.689313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.689524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.689739] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.689923] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.690108] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.692180] env[62405]: INFO nova.compute.manager [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Terminating instance [ 1638.763605] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947173, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.799349] env[62405]: DEBUG nova.scheduler.client.report [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1638.814784] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947175, 'name': CreateVM_Task, 'duration_secs': 0.452617} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.814784] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1638.815287] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.815529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.815877] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.816145] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae482200-6d00-4fa8-ac3e-e9eb353475d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.822927] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1638.822927] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52682382-62a4-dd21-f735-0c9909e27c64" [ 1638.822927] env[62405]: _type = "Task" [ 1638.822927] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.840780] env[62405]: INFO nova.compute.manager [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Rebuilding instance [ 1638.846039] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52682382-62a4-dd21-f735-0c9909e27c64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.901889] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1638.909336] env[62405]: DEBUG nova.compute.manager [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1638.910221] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984363ff-2548-4db0-b3af-854ace317161 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.172305] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1639.199020] env[62405]: DEBUG nova.compute.manager [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1639.199020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1639.199020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1e2865-656a-47dc-a434-bf96de0e2d13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.207938] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1639.207938] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18c529f1-c4a9-4f04-a88b-68cf82694021 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.218210] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1639.218210] env[62405]: value = "task-1947176" [ 1639.218210] env[62405]: _type = "Task" [ 1639.218210] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.231613] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.266903] env[62405]: DEBUG oslo_vmware.api [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947173, 'name': PowerOnVM_Task, 'duration_secs': 0.593243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.270325] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1639.271089] env[62405]: DEBUG nova.compute.manager [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1639.272515] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded9e8af-7023-43b9-b3d6-2ecdb16716a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.307025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.776s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.307025] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1639.309146] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.012s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.310721] env[62405]: INFO nova.compute.claims [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1639.336161] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52682382-62a4-dd21-f735-0c9909e27c64, 'name': SearchDatastore_Task, 'duration_secs': 0.015445} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.340021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.340021] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1639.340021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.340021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.340245] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1639.340245] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9978ba15-8d28-4a2a-b773-b2c3f4eca8ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.352086] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1639.352086] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1639.352086] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94bb6869-3c86-4ef0-b171-2b2af889fb38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.359423] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1639.359423] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd44d4-3938-2f1a-587b-56ca55531799" [ 1639.359423] env[62405]: _type = "Task" [ 1639.359423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.368959] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd44d4-3938-2f1a-587b-56ca55531799, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.378671] env[62405]: DEBUG nova.network.neutron [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Updated VIF entry in instance network info cache for port e766daac-fbcb-489e-aef5-d97530246eb0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1639.378671] env[62405]: DEBUG nova.network.neutron [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Updating instance_info_cache with network_info: [{"id": "e766daac-fbcb-489e-aef5-d97530246eb0", "address": "fa:16:3e:bf:a1:1e", "network": {"id": "4a1adf8e-9b11-47cf-a09e-910b0fd2b5ed", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2001588294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56442ca63108497d97070d582050f97b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape766daac-fb", "ovs_interfaceid": "e766daac-fbcb-489e-aef5-d97530246eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.410020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.410020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.410020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.410020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.410274] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.412400] env[62405]: INFO nova.compute.manager [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Terminating instance [ 1639.432528] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.702636] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.738568] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947176, 'name': PowerOffVM_Task, 'duration_secs': 0.291322} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.738568] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1639.738568] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1639.738568] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf6ba2db-8cee-41f6-82ca-6b2978f535f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.793285] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Successfully updated port: 740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1639.797213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.816864] env[62405]: DEBUG nova.compute.utils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1639.820966] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1639.820966] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1639.844863] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1639.845134] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1639.845315] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Deleting the datastore file [datastore1] 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1639.846802] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce9f6500-ea66-4268-b172-9307d4b103df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.855598] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for the task: (returnval){ [ 1639.855598] env[62405]: value = "task-1947178" [ 1639.855598] env[62405]: _type = "Task" [ 1639.855598] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.871585] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.876197] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd44d4-3938-2f1a-587b-56ca55531799, 'name': SearchDatastore_Task, 'duration_secs': 0.013179} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.877249] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef3d0f30-c64e-4a29-b0cd-b8435a3acccc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.880470] env[62405]: DEBUG oslo_concurrency.lockutils [req-d2fd9b4a-e781-40f7-a4b7-c235a11ba62f req-dd5edba9-cf36-47dc-9620-5f8185629e74 service nova] Releasing lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.883740] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1639.883740] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aae71d-531b-4138-c2a9-1f7d7edb3479" [ 1639.883740] env[62405]: _type = "Task" [ 1639.883740] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.896403] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aae71d-531b-4138-c2a9-1f7d7edb3479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.913585] env[62405]: DEBUG nova.policy [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bea5fa632f74543a680f69edf3c05ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3cd6b7f1ce346e98fe8bff2423f34ab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1639.920096] env[62405]: DEBUG nova.compute.manager [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1639.920327] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1639.921203] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437b500d-1460-4b55-88b9-80d98719cc61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.927536] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1639.927536] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15be4446-5244-4966-8ced-4f366a911fa7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.931572] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1639.932426] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34a59ac4-fbe8-48fa-a7f9-f17c6b725d49 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.941977] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1639.941977] env[62405]: value = "task-1947179" [ 1639.941977] env[62405]: _type = "Task" [ 1639.941977] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.942283] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1639.942283] env[62405]: value = "task-1947180" [ 1639.942283] env[62405]: _type = "Task" [ 1639.942283] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.962046] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.966263] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1947180, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.298048] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.298672] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.298672] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1640.324020] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1640.349485] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Successfully created port: 7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1640.372745] env[62405]: DEBUG oslo_vmware.api [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Task: {'id': task-1947178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23649} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.372745] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1640.372745] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1640.372745] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1640.372994] env[62405]: INFO nova.compute.manager [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1640.373222] env[62405]: DEBUG oslo.service.loopingcall [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.373434] env[62405]: DEBUG nova.compute.manager [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1640.373549] env[62405]: DEBUG nova.network.neutron [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1640.403262] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aae71d-531b-4138-c2a9-1f7d7edb3479, 'name': SearchDatastore_Task, 'duration_secs': 0.025972} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.403559] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.403824] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] c392d6f3-b638-4857-826d-760c38b7d291/c392d6f3-b638-4857-826d-760c38b7d291.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1640.404115] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2823684b-ae3b-4fa2-b5d2-0fc5d231eadb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.416171] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1640.416171] env[62405]: value = "task-1947181" [ 1640.416171] env[62405]: _type = "Task" [ 1640.416171] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.426658] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.456403] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947179, 'name': PowerOffVM_Task, 'duration_secs': 0.219985} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.465022] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1640.465022] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1640.465022] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1947180, 'name': PowerOffVM_Task, 'duration_secs': 0.246063} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.465022] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a254a8b-35cf-46b1-bf32-e2acf3ac3307 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.469360] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1640.469529] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1640.469764] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e98a4337-54ba-4f7c-a643-c506e53e28f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.478588] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1640.478588] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f58befc5-bdd4-4dcd-b767-07724646c987 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.518841] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1640.519120] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1640.519304] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleting the datastore file [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1640.519764] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec48e49c-e45c-4a0c-a65b-34c3b346975d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.530185] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1640.530185] env[62405]: value = "task-1947184" [ 1640.530185] env[62405]: _type = "Task" [ 1640.530185] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.542250] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.547112] env[62405]: DEBUG nova.compute.manager [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Received event network-vif-plugged-740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1640.547348] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Acquiring lock "59957a81-5297-43d3-a673-024a53a19116-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.547571] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Lock "59957a81-5297-43d3-a673-024a53a19116-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.547735] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Lock "59957a81-5297-43d3-a673-024a53a19116-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.547913] env[62405]: DEBUG nova.compute.manager [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] No waiting events found dispatching 
network-vif-plugged-740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1640.548129] env[62405]: WARNING nova.compute.manager [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Received unexpected event network-vif-plugged-740acbcf-c471-4523-a1ba-a92cc67c2990 for instance with vm_state building and task_state spawning. [ 1640.548319] env[62405]: DEBUG nova.compute.manager [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Received event network-changed-740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1640.548539] env[62405]: DEBUG nova.compute.manager [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Refreshing instance network info cache due to event network-changed-740acbcf-c471-4523-a1ba-a92cc67c2990. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1640.548729] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Acquiring lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.576644] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1640.576879] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1640.577082] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Deleting the datastore file [datastore1] 792cd2c8-a67d-4b16-93ab-722fcc8b622d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1640.577374] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f4e83a9-a24a-4d36-bb36-99d4cf82e77f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.589092] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for the task: (returnval){ [ 1640.589092] env[62405]: value = "task-1947185" [ 1640.589092] env[62405]: _type = "Task" [ 1640.589092] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.600699] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1947185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.909835] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.924017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1474c8b5-615f-4aaf-b114-ef10365be08e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.933211] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947181, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.937113] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119adf51-9e28-4373-a3f9-53b0cb170310 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.978899] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd68843-c1ac-49d6-a0d0-fe4c3283cb85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.991992] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6577860-3147-44a4-8de3-a3ac9295321f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.010306] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.010399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.010714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "d5686d7c-a73f-4e02-8726-eab8221a0eae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.010953] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.011601] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.013650] env[62405]: DEBUG nova.compute.provider_tree [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1641.015604] env[62405]: INFO nova.compute.manager [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Terminating instance [ 1641.046052] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202217} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.046052] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1641.046052] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1641.046052] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1641.102460] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1947185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.338296] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1641.360191] env[62405]: DEBUG nova.network.neutron [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Updating instance_info_cache with network_info: [{"id": "740acbcf-c471-4523-a1ba-a92cc67c2990", "address": "fa:16:3e:d6:1f:d6", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740acbcf-c4", "ovs_interfaceid": "740acbcf-c471-4523-a1ba-a92cc67c2990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.365458] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:23:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='dc0b8b8d-2143-43d6-88ba-cc2419f1681a',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1321132944',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1641.365711] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1641.365878] env[62405]: DEBUG nova.virt.hardware [None 
req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1641.366088] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1641.366245] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1641.366422] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1641.366631] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1641.366800] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1641.366980] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1641.367196] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1641.367385] env[62405]: DEBUG nova.virt.hardware [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1641.369393] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950f5081-fd9a-45cd-9c8a-4627ba10a4ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.377924] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95498cd0-4c36-4b2a-9d03-30fa2cf0d43a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.427114] env[62405]: DEBUG oslo_vmware.api [None 
req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.87644} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.427414] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] c392d6f3-b638-4857-826d-760c38b7d291/c392d6f3-b638-4857-826d-760c38b7d291.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1641.428289] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1641.428289] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5240fde-1d10-4371-ad55-0368c2f872b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.436707] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1641.436707] env[62405]: value = "task-1947186" [ 1641.436707] env[62405]: _type = "Task" [ 1641.436707] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.446228] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947186, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.506886] env[62405]: DEBUG nova.compute.manager [req-af2c29f0-d30b-47c7-a63c-fb9dc7fdf189 req-620adaef-ade6-462d-b2bc-e20f267c9a34 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Received event network-vif-deleted-63b67ea3-5aa4-459f-b475-23d131034cb5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1641.507140] env[62405]: INFO nova.compute.manager [req-af2c29f0-d30b-47c7-a63c-fb9dc7fdf189 req-620adaef-ade6-462d-b2bc-e20f267c9a34 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Neutron deleted interface 63b67ea3-5aa4-459f-b475-23d131034cb5; detaching it from the instance and deleting it from the info cache [ 1641.507353] env[62405]: DEBUG nova.network.neutron [req-af2c29f0-d30b-47c7-a63c-fb9dc7fdf189 req-620adaef-ade6-462d-b2bc-e20f267c9a34 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.518482] env[62405]: DEBUG nova.scheduler.client.report [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1641.525165] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "refresh_cache-d5686d7c-a73f-4e02-8726-eab8221a0eae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.525165] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquired lock "refresh_cache-d5686d7c-a73f-4e02-8726-eab8221a0eae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.525165] env[62405]: DEBUG nova.network.neutron [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1641.602137] env[62405]: DEBUG oslo_vmware.api [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Task: {'id': task-1947185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.748337} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.602137] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1641.602137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1641.602137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1641.602137] env[62405]: INFO nova.compute.manager [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1641.602419] env[62405]: DEBUG oslo.service.loopingcall [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.602419] env[62405]: DEBUG nova.compute.manager [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1641.602419] env[62405]: DEBUG nova.network.neutron [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1641.645230] env[62405]: DEBUG nova.network.neutron [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.863312] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.863874] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Instance network_info: |[{"id": "740acbcf-c471-4523-a1ba-a92cc67c2990", "address": "fa:16:3e:d6:1f:d6", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740acbcf-c4", "ovs_interfaceid": "740acbcf-c471-4523-a1ba-a92cc67c2990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1641.864224] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Acquired lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.864478] env[62405]: DEBUG nova.network.neutron [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Refreshing network info cache for port 740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.865671] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 
tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:1f:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '740acbcf-c471-4523-a1ba-a92cc67c2990', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1641.877411] env[62405]: DEBUG oslo.service.loopingcall [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.878276] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1641.878276] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-649586b9-8969-4c2e-a897-ec5bb573ce1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.900612] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1641.900612] env[62405]: value = "task-1947187" [ 1641.900612] env[62405]: _type = "Task" [ 1641.900612] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.909192] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947187, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.947263] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140117} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.947557] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1641.948338] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41f4182-a8db-4455-b583-b64900490330 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.972356] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] c392d6f3-b638-4857-826d-760c38b7d291/c392d6f3-b638-4857-826d-760c38b7d291.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1641.973389] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Successfully updated port: 7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1641.974557] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b3aaae5-b1df-4363-a87e-c102374d74e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.996530] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1641.996530] env[62405]: value = "task-1947188" [ 1641.996530] env[62405]: _type = "Task" [ 1641.996530] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.008844] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947188, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.009731] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9b2e347-4d9d-407f-b162-3ad910ce53fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.020833] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0866eb61-55db-4526-9f4e-c0c77772ee3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.034216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.034216] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1642.038436] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.350s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.038664] env[62405]: DEBUG nova.objects.instance [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lazy-loading 'resources' on Instance uuid b3647042-89a1-4d15-b85e-49a5c8def1d4 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1642.070818] env[62405]: DEBUG nova.compute.manager [req-af2c29f0-d30b-47c7-a63c-fb9dc7fdf189 req-620adaef-ade6-462d-b2bc-e20f267c9a34 service nova] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Detach interface failed, port_id=63b67ea3-5aa4-459f-b475-23d131034cb5, reason: Instance 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1642.072379] env[62405]: DEBUG nova.network.neutron [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1642.117922] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1642.117922] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1642.118206] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1642.118254] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1642.118439] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1642.118542] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1642.118819] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1642.118981] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1642.119169] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1642.119364] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1642.119548] env[62405]: DEBUG nova.virt.hardware [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1642.120748] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c4cba5-d5ae-405a-80a2-19310d4698eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.130012] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa1591d-20c5-4349-a8f3-d3540e7fd11d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.145450] env[62405]: INFO nova.compute.manager [-] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Took 1.77 seconds to deallocate network for instance. [ 1642.145934] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1642.151371] env[62405]: DEBUG oslo.service.loopingcall [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1642.153245] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1642.157413] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-596674ac-50d0-408a-8c90-d6ed3d0a3e0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.173451] env[62405]: DEBUG nova.network.neutron [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.182480] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1642.182480] env[62405]: value = "task-1947189" [ 1642.182480] env[62405]: _type = "Task" [ 1642.182480] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.193041] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947189, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.414663] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947187, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.489829] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.489975] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.490142] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1642.510187] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947188, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.540218] env[62405]: DEBUG nova.compute.utils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1642.541599] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1642.541772] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1642.637869] env[62405]: DEBUG nova.compute.manager [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Received event network-vif-plugged-7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1642.637869] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Acquiring lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.637869] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.637869] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.637869] env[62405]: DEBUG nova.compute.manager [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] No waiting events found dispatching network-vif-plugged-7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1642.638343] env[62405]: WARNING nova.compute.manager [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Received unexpected event network-vif-plugged-7fbae16c-e943-4752-8a7e-92bdea130e1a for instance with vm_state building and task_state spawning. [ 1642.638587] env[62405]: DEBUG nova.compute.manager [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Received event network-changed-7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1642.639621] env[62405]: DEBUG nova.compute.manager [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Refreshing instance network info cache due to event network-changed-7fbae16c-e943-4752-8a7e-92bdea130e1a. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1642.639747] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.668623] env[62405]: DEBUG nova.network.neutron [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Updated VIF entry in instance network info cache for port 740acbcf-c471-4523-a1ba-a92cc67c2990. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1642.668623] env[62405]: DEBUG nova.network.neutron [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Updating instance_info_cache with network_info: [{"id": "740acbcf-c471-4523-a1ba-a92cc67c2990", "address": "fa:16:3e:d6:1f:d6", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap740acbcf-c4", "ovs_interfaceid": "740acbcf-c471-4523-a1ba-a92cc67c2990", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.673717] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.676243] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Releasing lock "refresh_cache-d5686d7c-a73f-4e02-8726-eab8221a0eae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.676687] env[62405]: DEBUG nova.compute.manager [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1642.676929] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1642.677943] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6159dcdd-a51d-4c00-8001-02ff4333caa9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.690589] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.691211] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccf288bf-f855-48fc-b89f-0df636ddbdaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.698196] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947189, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.703581] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1642.703581] env[62405]: value = "task-1947190" [ 1642.703581] env[62405]: _type = "Task" [ 1642.703581] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.713395] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947190, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.749507] env[62405]: DEBUG nova.policy [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4776ce09f9114ddb9bc1e4c03b8e0512', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e41926f72174671982ba0d6c4b0f2d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1642.913931] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947187, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.015137] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947188, 'name': ReconfigVM_Task, 'duration_secs': 0.896493} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.023459] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Reconfigured VM instance instance-0000002b to attach disk [datastore1] c392d6f3-b638-4857-826d-760c38b7d291/c392d6f3-b638-4857-826d-760c38b7d291.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1643.024828] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93c01745-ff6e-46a7-ae73-bbfeefe47b9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.035032] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1643.035032] env[62405]: value = "task-1947191" [ 1643.035032] env[62405]: _type = "Task" [ 1643.035032] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.045931] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1643.049947] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947191, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.133391] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b4bb60-25d5-4927-b258-0ce36b8cf0ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.142423] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcaade2-487f-4361-80b2-1c1a4ed1add3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.181477] env[62405]: DEBUG oslo_concurrency.lockutils [req-a4746c62-2ac4-4db7-9fcb-68922f13faab req-77d85f4e-8a31-4954-9c10-f4e0fce2f707 service nova] Releasing lock "refresh_cache-59957a81-5297-43d3-a673-024a53a19116" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.182664] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23669ca6-e486-4f2e-8a63-4cf2521e08c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.198715] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b42d37-dbcd-4f97-af51-72dbb5fa5df6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.202634] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947189, 'name': CreateVM_Task, 'duration_secs': 0.921821} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.202810] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1643.203614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.203773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.204110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1643.204404] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9a60cba-c32c-4c32-9c4c-604c8af1498b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.219032] env[62405]: DEBUG nova.compute.provider_tree [None 
req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1643.226159] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947190, 'name': PowerOffVM_Task, 'duration_secs': 0.251626} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.227493] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.227719] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.228038] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1643.228038] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5273c2ea-359e-5531-b5aa-63dc7d9a02f0" [ 1643.228038] env[62405]: _type = "Task" [ 1643.228038] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.228229] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-604d75b5-1645-4961-b576-fd65b277b0e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.240936] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5273c2ea-359e-5531-b5aa-63dc7d9a02f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.268108] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.268363] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.268553] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Deleting the datastore file [datastore1] d5686d7c-a73f-4e02-8726-eab8221a0eae {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.268816] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a23dcc79-a5f7-416b-a43e-1acd5275deba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.276135] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for the task: (returnval){ [ 1643.276135] env[62405]: value = "task-1947193" [ 1643.276135] env[62405]: _type = "Task" [ 1643.276135] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.285731] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947193, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.310336] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.366157] env[62405]: DEBUG nova.network.neutron [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.418929] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947187, 'name': CreateVM_Task, 'duration_secs': 1.188751} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.420676] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1643.421221] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.545236] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947191, 'name': Rename_Task, 'duration_secs': 0.176276} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.545559] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1643.545833] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68017483-3148-4fc2-a5c8-7e7ec2099f51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.553808] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1643.553808] env[62405]: value = "task-1947194" [ 1643.553808] env[62405]: _type = "Task" [ 1643.553808] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.566594] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947194, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.722713] env[62405]: DEBUG nova.scheduler.client.report [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1643.741978] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5273c2ea-359e-5531-b5aa-63dc7d9a02f0, 'name': SearchDatastore_Task, 'duration_secs': 0.01792} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.742947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.743214] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1643.743450] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.743599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.743778] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.744076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.744375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1643.744899] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7ade49b-c0d7-4a35-8e8c-dcd0a58b12cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.746907] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cf3877f-9308-4833-b59d-d523282e0cec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.752631] env[62405]: DEBUG nova.network.neutron [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.755104] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1643.755104] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528890cd-4a36-1a33-2629-fefd13beff25" [ 1643.755104] env[62405]: _type = "Task" [ 1643.755104] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.761113] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.761335] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1643.762768] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a285d841-620c-4157-9643-95e8f232a228 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.770783] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528890cd-4a36-1a33-2629-fefd13beff25, 'name': SearchDatastore_Task, 'duration_secs': 0.010265} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.771248] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.772097] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1643.772326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.774735] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1643.774735] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526492be-5f19-d5bb-7aac-f19d9e8c47c3" [ 1643.774735] env[62405]: _type = "Task" [ 1643.774735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.790278] env[62405]: DEBUG oslo_vmware.api [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Task: {'id': task-1947193, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21616} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.793387] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.793936] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1643.794218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1643.794412] env[62405]: INFO nova.compute.manager [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1643.794701] env[62405]: DEBUG oslo.service.loopingcall [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1643.795209] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526492be-5f19-d5bb-7aac-f19d9e8c47c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010844} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.795416] env[62405]: DEBUG nova.compute.manager [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1643.795513] env[62405]: DEBUG nova.network.neutron [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1643.798482] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f4c6475-8521-41e5-aa37-50e7167d59dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.805570] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1643.805570] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5278905b-b8cc-89c8-869e-95da76abdf39" [ 1643.805570] env[62405]: _type = "Task" [ 1643.805570] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.815251] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5278905b-b8cc-89c8-869e-95da76abdf39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.832809] env[62405]: DEBUG nova.network.neutron [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.837021] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Successfully created port: 5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.870229] env[62405]: INFO nova.compute.manager [-] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Took 2.27 seconds to deallocate network for instance. [ 1644.010098] env[62405]: DEBUG nova.compute.manager [req-f2b7ad0e-f82d-4d06-b123-7c1ea64a1017 req-d9844e3b-edb5-43f8-8870-1e6f85ff03d8 service nova] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Received event network-vif-deleted-c1f4fd8a-cda2-4206-b706-58f6fa8c722e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1644.062329] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1644.069984] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947194, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.102166] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1644.102465] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1644.102655] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1644.103140] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1644.103140] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1644.103140] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1644.104266] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1644.104562] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1644.104855] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1644.105052] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1644.105234] env[62405]: DEBUG nova.virt.hardware [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1644.106229] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427400a2-a133-4955-bcf8-449a98d1d231 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.115637] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb7bce0-779d-411e-ae8b-4f53c352fccb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.228038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.230541] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.219s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.230831] env[62405]: DEBUG nova.objects.instance [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lazy-loading 'resources' on Instance uuid 0feaeb5d-9f4a-4166-99b1-f213bc4fa458 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1644.257190] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.257596] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Instance network_info: |[{"id": 
"7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1644.258188] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.258363] env[62405]: DEBUG nova.network.neutron [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Refreshing network info cache for port 7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1644.259520] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:2e:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fbae16c-e943-4752-8a7e-92bdea130e1a', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1644.267315] env[62405]: DEBUG oslo.service.loopingcall [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.268835] env[62405]: INFO nova.scheduler.client.report [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleted allocations for instance b3647042-89a1-4d15-b85e-49a5c8def1d4 [ 1644.269966] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1644.270514] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a51ecfa-b6e4-466e-a254-e4637043b3c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.295258] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1644.295258] env[62405]: value = "task-1947195" [ 1644.295258] env[62405]: _type = "Task" [ 1644.295258] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.308248] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947195, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.319275] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5278905b-b8cc-89c8-869e-95da76abdf39, 'name': SearchDatastore_Task, 'duration_secs': 0.009688} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.319638] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.319967] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1644.320731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.320985] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.321270] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-819a69f0-ff49-4c9d-9649-599fd6053054 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.325271] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-363858fb-4148-4469-921b-cb43b66c5bb6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.334800] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1644.334800] env[62405]: value = "task-1947196" [ 1644.334800] env[62405]: _type = "Task" [ 1644.334800] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.343018] env[62405]: DEBUG nova.network.neutron [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.344674] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.344926] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1644.346620] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6e3d98-377e-497b-9108-e5d562c4a1bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.353749] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947196, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.359384] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1644.359384] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e5d43f-bf1f-7a7e-eeb0-6b90440b3a61" [ 1644.359384] env[62405]: _type = "Task" [ 1644.359384] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.371175] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e5d43f-bf1f-7a7e-eeb0-6b90440b3a61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.382243] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.568508] env[62405]: DEBUG oslo_vmware.api [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947194, 'name': PowerOnVM_Task, 'duration_secs': 0.592915} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.568508] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1644.568508] env[62405]: INFO nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Took 8.79 seconds to spawn the instance on the hypervisor. [ 1644.569015] env[62405]: DEBUG nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1644.569779] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9db28c-c698-44a4-a947-2ce86f96270a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.796196] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b97ea554-13a3-4305-999f-6075250c6fb4 tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b3647042-89a1-4d15-b85e-49a5c8def1d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.541s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.817117] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947195, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.851106] env[62405]: INFO nova.compute.manager [-] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Took 1.06 seconds to deallocate network for instance. [ 1644.851517] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947196, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511694} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.853845] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1644.854107] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1644.854768] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8385192f-6851-4a68-a8ab-e48faa8be97c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.876232] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1644.876232] env[62405]: value = "task-1947197" [ 1644.876232] env[62405]: _type = "Task" [ 1644.876232] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.883994] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e5d43f-bf1f-7a7e-eeb0-6b90440b3a61, 'name': SearchDatastore_Task, 'duration_secs': 0.012779} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.887121] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b048e26-63f5-4762-8865-d19015574827 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.893257] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.898134] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1644.898134] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c6691-1c9f-1e38-35f9-261fd80bfa2c" [ 1644.898134] env[62405]: _type = "Task" [ 1644.898134] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.907539] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c6691-1c9f-1e38-35f9-261fd80bfa2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.096898] env[62405]: INFO nova.compute.manager [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Took 40.84 seconds to build instance. [ 1645.186382] env[62405]: DEBUG nova.network.neutron [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updated VIF entry in instance network info cache for port 7fbae16c-e943-4752-8a7e-92bdea130e1a. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1645.186935] env[62405]: DEBUG nova.network.neutron [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.316945] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947195, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.327922] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5cecdf-d444-4149-b938-b495de828f61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.336405] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ab158e-edff-4740-8500-8e19409d8660 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.373108] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.374215] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0032016-9f25-4cba-b450-f6d0f7a766eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.383792] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9187bf31-6825-4962-9cac-7ab1944e4945 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.391124] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.400223] env[62405]: DEBUG nova.compute.provider_tree [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1645.410196] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c6691-1c9f-1e38-35f9-261fd80bfa2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.411069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.411335] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 59957a81-5297-43d3-a673-024a53a19116/59957a81-5297-43d3-a673-024a53a19116.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1645.411605] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b65b97a-718d-4ac9-a4f4-0055c592607e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.419356] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1645.419356] env[62405]: value = "task-1947198" [ 1645.419356] env[62405]: _type = "Task" [ 1645.419356] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.428536] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.604052] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8f26ade-7f6b-402e-adb2-4cc815384ba8 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.694s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.691515] env[62405]: DEBUG oslo_concurrency.lockutils [req-47045816-0e34-4932-a9c3-64cfbbee3bc9 req-96a6d54b-f671-450a-b82a-723a5fd9432c service nova] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.814033] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947195, 'name': CreateVM_Task, 'duration_secs': 1.500727} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.814033] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1645.814205] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.815054] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.815054] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1645.815054] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f732dba-35ed-4e2f-8bf5-f2a822edf08b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.823346] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Successfully updated port: 5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1645.829708] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1645.829708] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52362dde-6755-3fc1-3f16-f6b7345e4b08" [ 1645.829708] env[62405]: _type = "Task" [ 1645.829708] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.842897] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52362dde-6755-3fc1-3f16-f6b7345e4b08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.889497] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.899019} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.889813] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1645.890783] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ed4999-cde2-436e-853b-fe407350fd89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.913626] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1645.915112] env[62405]: DEBUG nova.scheduler.client.report [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1645.917973] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8fb9bf9-6386-4c52-852d-cebe0cc7391a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.944855] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947198, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.946854] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1645.946854] env[62405]: value = "task-1947199" [ 1645.946854] env[62405]: _type = "Task" [ 1645.946854] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.957388] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947199, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.145745] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.146193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.146626] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.147106] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.147362] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.153300] env[62405]: INFO nova.compute.manager [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Terminating instance [ 1646.204021] env[62405]: DEBUG nova.compute.manager [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Received event network-vif-plugged-5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1646.204021] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Acquiring lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.204021] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da 
req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.204021] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.204021] env[62405]: DEBUG nova.compute.manager [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] No waiting events found dispatching network-vif-plugged-5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1646.204551] env[62405]: WARNING nova.compute.manager [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Received unexpected event network-vif-plugged-5e6a4310-9a98-402b-bb12-b6ed546139b9 for instance with vm_state building and task_state spawning. [ 1646.204551] env[62405]: DEBUG nova.compute.manager [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Received event network-changed-5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1646.204551] env[62405]: DEBUG nova.compute.manager [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Refreshing instance network info cache due to event network-changed-5e6a4310-9a98-402b-bb12-b6ed546139b9. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1646.204551] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Acquiring lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.204551] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Acquired lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.204746] env[62405]: DEBUG nova.network.neutron [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Refreshing network info cache for port 5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1646.292438] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.292438] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.330593] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.351181] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52362dde-6755-3fc1-3f16-f6b7345e4b08, 'name': SearchDatastore_Task, 'duration_secs': 0.056131} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.352706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.353127] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.353513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.354330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.354330] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.354903] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f0d80d0-da82-48d6-966e-fa2b6dd2aae3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.373191] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.373191] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.373722] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b69fe51-c799-4e8d-8b50-79ce73da4023 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.381467] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1646.381467] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523cc580-78cd-4e64-ad5d-7852c8cd2c9e" [ 1646.381467] env[62405]: _type = "Task" [ 1646.381467] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.390652] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523cc580-78cd-4e64-ad5d-7852c8cd2c9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.433714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.203s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.436307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.129s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.436603] env[62405]: DEBUG nova.objects.instance [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lazy-loading 'resources' on Instance uuid 262424b0-dc7d-4b6c-9539-2d6cd23a93da {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1646.448970] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571314} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.455702] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 59957a81-5297-43d3-a673-024a53a19116/59957a81-5297-43d3-a673-024a53a19116.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1646.456105] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1646.457195] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3b7d345-219b-4985-9d51-87fc73a55f54 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.467971] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947199, 'name': ReconfigVM_Task, 'duration_secs': 0.331586} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.470144] env[62405]: INFO nova.scheduler.client.report [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleted allocations for instance 0feaeb5d-9f4a-4166-99b1-f213bc4fa458 [ 1646.471740] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac/1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1646.474269] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1646.474269] env[62405]: value = "task-1947200" [ 1646.474269] env[62405]: _type = "Task" [ 1646.474269] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.479049] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-801b21ff-fc4b-4067-98ad-40c07fef1f7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.495925] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947200, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.498086] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1646.498086] env[62405]: value = "task-1947201" [ 1646.498086] env[62405]: _type = "Task" [ 1646.498086] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.511649] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947201, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.659594] env[62405]: DEBUG nova.compute.manager [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1646.659946] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1646.661113] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f772024-2f3f-49be-8940-7420a50fd263 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.671694] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1646.672034] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f918a118-d4d8-45e9-9ef3-81a1c1f176cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.679600] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1646.679600] env[62405]: value = "task-1947202" [ 1646.679600] env[62405]: _type = "Task" [ 1646.679600] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.690299] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947202, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.739913] env[62405]: DEBUG nova.network.neutron [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.795021] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1646.806442] env[62405]: DEBUG nova.network.neutron [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.844985] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "c392d6f3-b638-4857-826d-760c38b7d291" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.849019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.849019] env[62405]: INFO nova.compute.manager [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Rebooting instance [ 1646.893867] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523cc580-78cd-4e64-ad5d-7852c8cd2c9e, 'name': SearchDatastore_Task, 'duration_secs': 0.058099} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.894793] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96498abc-748c-4d6c-8ffb-3069b7d507de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.901742] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1646.901742] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a142da-4d72-0e1c-1788-174eb55cebff" [ 1646.901742] env[62405]: _type = "Task" [ 1646.901742] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.913074] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a142da-4d72-0e1c-1788-174eb55cebff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.994025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2031dff1-3b10-4be6-a3b5-1b62ae041342 tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0feaeb5d-9f4a-4166-99b1-f213bc4fa458" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.982s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.003993] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947200, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078289} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.008271] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1647.011707] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6233ec30-56c8-4c5c-8963-f94b4e5b9a3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.020340] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947201, 'name': Rename_Task, 'duration_secs': 0.178256} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.032151] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1647.040536] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 59957a81-5297-43d3-a673-024a53a19116/59957a81-5297-43d3-a673-024a53a19116.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1647.044361] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8c7743c-6679-4fbf-ace9-74686aef8db2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.045585] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d54b20a-0d74-4c68-a326-edc8e16ac9b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.068256] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1647.068256] env[62405]: value = "task-1947203" [ 1647.068256] env[62405]: _type = "Task" [ 1647.068256] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.069723] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1647.069723] env[62405]: value = "task-1947204" [ 1647.069723] env[62405]: _type = "Task" [ 1647.069723] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.086400] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.086648] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947204, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.190856] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947202, 'name': PowerOffVM_Task, 'duration_secs': 0.356163} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.191315] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1647.191479] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1647.192655] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c177ba21-ef24-45e1-8c34-8a85eaab332e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.275878] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1647.276289] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1647.277047] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleting the datastore file [datastore1] b8ff115b-64f1-4584-afa2-478c5e6b726b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1647.277047] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f15b815a-6e45-4dd0-aa2a-f8ca9f47a018 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.284702] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for the task: (returnval){ [ 1647.284702] env[62405]: value = "task-1947206" [ 1647.284702] env[62405]: _type = "Task" [ 1647.284702] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.300912] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947206, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.310557] env[62405]: DEBUG oslo_concurrency.lockutils [req-a7980c05-ef5d-4fe3-9afb-9e2ddeb388da req-e41fdc87-5ce6-4227-9e6f-97f87acf4819 service nova] Releasing lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.310557] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.310557] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.313038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.366849] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.367077] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquired lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.367268] env[62405]: DEBUG nova.network.neutron [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.414352] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a142da-4d72-0e1c-1788-174eb55cebff, 'name': SearchDatastore_Task, 'duration_secs': 0.027891} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.417816] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.418100] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.418599] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a339fb5-26f6-4292-a7c5-efa75681872a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.430435] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1647.430435] env[62405]: value = "task-1947207" [ 1647.430435] env[62405]: _type = "Task" [ 1647.430435] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.445086] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947207, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.465025] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f747d8f0-09e9-4949-beee-4001d20ec376 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.473033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163522c4-d6d2-4a50-9538-581e9e78cc43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.503719] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a77871-2596-4966-bc69-e172bcdf20c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.515026] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aeb984-55a2-4517-9ccf-9af9dfbaf64d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.519904] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.520158] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.529585] env[62405]: DEBUG nova.compute.provider_tree [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1647.583722] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947204, 'name': ReconfigVM_Task, 'duration_secs': 0.382721} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.583983] env[62405]: DEBUG oslo_vmware.api [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947203, 'name': PowerOnVM_Task, 'duration_secs': 0.491115} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.584306] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 59957a81-5297-43d3-a673-024a53a19116/59957a81-5297-43d3-a673-024a53a19116.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.585663] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1647.585899] env[62405]: DEBUG nova.compute.manager [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1647.586243] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b921f06-98a2-4588-801c-2aa6d972b8ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.588451] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d817f37-9748-4ff6-b319-0e70ea23f64b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.599685] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1647.599685] env[62405]: value = "task-1947208" [ 1647.599685] env[62405]: _type = "Task" [ 1647.599685] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.609699] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947208, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.796419] env[62405]: DEBUG oslo_vmware.api [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Task: {'id': task-1947206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169463} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.796767] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1647.797070] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1647.797310] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1647.797536] env[62405]: INFO nova.compute.manager [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1647.797853] env[62405]: DEBUG oslo.service.loopingcall [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.798085] env[62405]: DEBUG nova.compute.manager [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1647.798213] env[62405]: DEBUG nova.network.neutron [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1647.897986] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1647.940724] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947207, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.031995] env[62405]: DEBUG nova.compute.utils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1648.033713] env[62405]: DEBUG nova.scheduler.client.report [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1648.087587] env[62405]: DEBUG nova.network.neutron [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Updating instance_info_cache with network_info: [{"id": "e766daac-fbcb-489e-aef5-d97530246eb0", "address": "fa:16:3e:bf:a1:1e", "network": {"id": "4a1adf8e-9b11-47cf-a09e-910b0fd2b5ed", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2001588294-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56442ca63108497d97070d582050f97b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape766daac-fb", "ovs_interfaceid": "e766daac-fbcb-489e-aef5-d97530246eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.116342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.122957] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947208, 'name': Rename_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.200660] env[62405]: DEBUG nova.network.neutron [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Updating instance_info_cache with network_info: [{"id": "5e6a4310-9a98-402b-bb12-b6ed546139b9", "address": "fa:16:3e:08:94:fd", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e6a4310-9a", "ovs_interfaceid": "5e6a4310-9a98-402b-bb12-b6ed546139b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.442103] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548116} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.442379] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.442595] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.442849] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bb57f07-cf99-415a-b258-c1c8ed6bcb03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.450242] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1648.450242] env[62405]: value = "task-1947209" [ 1648.450242] env[62405]: _type = "Task" [ 1648.450242] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.459067] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947209, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.538658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.018s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.543342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.545793] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.450s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.547308] env[62405]: INFO nova.compute.claims [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1648.571143] env[62405]: INFO nova.scheduler.client.report [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Deleted allocations for instance 262424b0-dc7d-4b6c-9539-2d6cd23a93da [ 1648.591076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Releasing lock "refresh_cache-c392d6f3-b638-4857-826d-760c38b7d291" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.613505] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947208, 'name': Rename_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.703071] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "refresh_cache-a9f83357-4898-44ff-a6d8-ea6621453de9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.703476] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance network_info: |[{"id": "5e6a4310-9a98-402b-bb12-b6ed546139b9", "address": "fa:16:3e:08:94:fd", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e6a4310-9a", "ovs_interfaceid": "5e6a4310-9a98-402b-bb12-b6ed546139b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1648.703900] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:94:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e6a4310-9a98-402b-bb12-b6ed546139b9', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1648.711851] env[62405]: DEBUG oslo.service.loopingcall [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1648.711972] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1648.712137] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c735439-a0bc-42fe-98a0-06e7e476cbbd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.733142] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1648.733142] env[62405]: value = "task-1947210" [ 1648.733142] env[62405]: _type = "Task" [ 1648.733142] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.744365] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947210, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.749208] env[62405]: DEBUG nova.network.neutron [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.965268] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126619} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.965268] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.965268] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f4db5d-987a-4069-8a1f-6793a41eae8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.988961] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.989539] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f898981b-ac21-4052-b03a-27bc58d7c0ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.012544] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1649.012544] env[62405]: value = "task-1947211" [ 1649.012544] env[62405]: _type = "Task" [ 1649.012544] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.023109] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.080077] env[62405]: DEBUG oslo_concurrency.lockutils [None req-91a773c1-df78-4e8b-9e9d-bfbd423aeaaa tempest-FloatingIPsAssociationTestJSON-1653092102 tempest-FloatingIPsAssociationTestJSON-1653092102-project-member] Lock "262424b0-dc7d-4b6c-9539-2d6cd23a93da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.363s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.089600] env[62405]: DEBUG nova.compute.manager [req-363e4607-9d83-42bd-92d9-41aae0c53ff1 req-64143d28-654f-4946-867a-ae8819e3762c service nova] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Received event network-vif-deleted-531c83a1-6a38-4d64-8757-3ffee5c271ee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1649.096167] env[62405]: DEBUG nova.compute.manager [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1649.100241] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dbf536-37a8-4dd4-ae59-5d740dbf1511 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.119867] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947208, 'name': Rename_Task, 'duration_secs': 1.317714} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.121380] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.122067] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a0a82f3-4230-40f0-8fa1-8e96b9c48c98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.132486] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1649.132486] env[62405]: value = "task-1947212" [ 1649.132486] env[62405]: _type = "Task" [ 1649.132486] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.143357] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.245015] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947210, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.251336] env[62405]: INFO nova.compute.manager [-] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Took 1.45 seconds to deallocate network for instance. [ 1649.524356] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.611596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.611897] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.612203] env[62405]: INFO nova.compute.manager [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Attaching volume a420f725-234f-4c1b-bcf6-23fd04729838 to /dev/sdb [ 1649.644242] env[62405]: DEBUG oslo_vmware.api [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947212, 'name': PowerOnVM_Task, 'duration_secs': 0.497087} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.647894] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1649.648315] env[62405]: INFO nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Took 11.09 seconds to spawn the instance on the hypervisor. 
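The dominant cadence in this stretch of the log is a vCenter task being created (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, CreateVM_Task, PowerOnVM_Task), then polled until it reports "progress is N%" and finally "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained Python sketch of that polling loop only; FakeTask and poll_task are hypothetical stand-ins for illustration and are not the real oslo_vmware.api.VMwareAPISession.wait_for_task code or the driver's PropertyCollector calls.

```python
# Sketch of the "poll until the vCenter task completes" cadence seen in these
# log entries (progress is N% ... completed successfully, duration_secs=...).
# FakeTask and poll_task are hypothetical stand-ins, not the oslo_vmware API.
import time


class FakeTask:
    """Stand-in for a vCenter task handle such as task-1947209 (ExtendVirtualDisk_Task)."""

    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._step = 100 // steps

    def poll(self):
        """Advance and report progress; a real driver would query the task via vSphere instead."""
        self._progress = min(100, self._progress + self._step)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def poll_task(task, interval=0.5):
    """Loop until the task reports success, mimicking the _poll_task log lines above."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task: {{'name': {task.name!r}}} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully. duration_secs={duration:.6f}")
            return
        time.sleep(interval)


if __name__ == "__main__":
    poll_task(FakeTask("ExtendVirtualDisk_Task"), interval=0.1)
```

Under these assumptions the loop prints the same progress/completion lines the driver logs here; the real code additionally propagates task errors and records the duration that appears as 'duration_secs' in the entries that follow.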
[ 1649.649567] env[62405]: DEBUG nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1649.650855] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae34dd0-7a88-4768-8b77-42caa2e12f2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.655500] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4479288b-5919-4a97-ac17-6a53cdc41e85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.672965] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df88136f-75e4-4397-9509-9426d7e32599 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.690326] env[62405]: DEBUG nova.virt.block_device [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updating existing volume attachment record: 511c5c2e-da2b-4fe6-bcaf-b7b2dcffc414 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1649.750230] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947210, 'name': CreateVM_Task, 'duration_secs': 0.748415} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.753170] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.754568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.754568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.755087] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1649.755087] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dfb057d-86f4-45b0-bf36-d422033dfa41 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.757790] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.760143] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1649.760143] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b444ff-ca99-ae20-39b1-70737198b3ae" [ 1649.760143] env[62405]: _type = "Task" [ 1649.760143] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.770625] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b444ff-ca99-ae20-39b1-70737198b3ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.027028] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947211, 'name': ReconfigVM_Task, 'duration_secs': 0.712922} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.027319] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1650.028058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-153f8915-6ee1-400b-8ff6-428b57d1b081 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.035757] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1650.035757] env[62405]: value = "task-1947214" [ 1650.035757] env[62405]: _type = "Task" [ 1650.035757] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.050477] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947214, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.130262] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57316786-a5c6-49d5-9c1c-cf50abc113ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.138640] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59332b7c-255a-4e00-9b98-311329de54e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.148453] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475650c-f700-4a1e-ba66-2feeeba23179 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.151925] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Doing hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1650.152263] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-08f3c545-c69f-49e4-ba20-73a502556aec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.201018] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b815b46f-4905-4765-a08f-41bce96148dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.203929] env[62405]: DEBUG oslo_vmware.api [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1650.203929] env[62405]: value = "task-1947217" [ 1650.203929] env[62405]: _type = "Task" [ 1650.203929] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.204507] env[62405]: INFO nova.compute.manager [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Took 43.48 seconds to build instance. [ 1650.213948] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a10a3b9-6058-4db9-9228-803dbcda81c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.223958] env[62405]: DEBUG oslo_vmware.api [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947217, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.239628] env[62405]: DEBUG nova.compute.provider_tree [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.272091] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b444ff-ca99-ae20-39b1-70737198b3ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009618} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.272479] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.272700] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1650.272862] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.273026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.273231] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.273520] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4340daee-5b11-4e02-b5e6-608ddf853c33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.283542] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 
tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.283735] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1650.284491] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de29fd87-48ab-4c4d-bf51-7e70b59be8fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.290531] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1650.290531] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214c8a6-eb6f-66a4-22cb-b1031b4a8bee" [ 1650.290531] env[62405]: _type = "Task" [ 1650.290531] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.299209] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214c8a6-eb6f-66a4-22cb-b1031b4a8bee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.397121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.397413] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.397825] env[62405]: DEBUG nova.objects.instance [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'flavor' on Instance uuid 23748dfd-7c60-41db-8acb-7b49cf1c27db {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.545953] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947214, 'name': Rename_Task, 'duration_secs': 0.2318} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.546263] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1650.546561] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0753ad1a-4787-49fa-990d-dc5b1ce46bcb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.553577] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1650.553577] env[62405]: value = "task-1947218" [ 1650.553577] env[62405]: _type = "Task" [ 1650.553577] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.566203] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.710008] env[62405]: DEBUG oslo_concurrency.lockutils [None req-35106ad6-a5a8-4713-addb-16d31320b11c tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.688s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.717957] env[62405]: DEBUG oslo_vmware.api [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947217, 'name': ResetVM_Task, 'duration_secs': 0.110085} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.718260] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Did hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1650.718490] env[62405]: DEBUG nova.compute.manager [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1650.719449] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6a619d-8272-4ce3-8a46-7f1d69b77d1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.743405] env[62405]: DEBUG nova.scheduler.client.report [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1650.805331] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5214c8a6-eb6f-66a4-22cb-b1031b4a8bee, 'name': SearchDatastore_Task, 'duration_secs': 0.010603} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.807338] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63e0903e-de9f-4c1d-ba47-7329684fc208 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.815970] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1650.815970] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52397a47-ed2e-9322-c4d0-10cc4d6309a5" [ 1650.815970] env[62405]: _type = "Task" [ 1650.815970] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.827552] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52397a47-ed2e-9322-c4d0-10cc4d6309a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.010870] env[62405]: DEBUG nova.objects.instance [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 23748dfd-7c60-41db-8acb-7b49cf1c27db {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.064442] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947218, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.236414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49ce2ce0-492a-4a46-bff5-fa35bfab5354 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.391s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.250550] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.251180] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1651.253823] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.731s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.254998] env[62405]: INFO nova.compute.claims [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1651.289211] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.289211] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.289642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.289642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.290754] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.293432] env[62405]: INFO nova.compute.manager [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Terminating instance [ 1651.327339] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 
tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52397a47-ed2e-9322-c4d0-10cc4d6309a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.327615] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.327874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a9f83357-4898-44ff-a6d8-ea6621453de9/a9f83357-4898-44ff-a6d8-ea6621453de9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1651.328143] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a4a5141-7e57-485d-94f8-6bfbe100bf7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.336255] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1651.336255] env[62405]: value = "task-1947219" [ 1651.336255] env[62405]: _type = "Task" [ 1651.336255] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.345602] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.515235] env[62405]: DEBUG nova.objects.base [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<23748dfd-7c60-41db-8acb-7b49cf1c27db> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1651.515235] env[62405]: DEBUG nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1651.565023] env[62405]: DEBUG oslo_vmware.api [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947218, 'name': PowerOnVM_Task, 'duration_secs': 0.542178} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.566475] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1651.566475] env[62405]: INFO nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1651.566475] env[62405]: DEBUG nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1651.566795] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbea13e-9523-4cf1-91cc-bb970c9265ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.597376] env[62405]: DEBUG nova.policy [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1651.764236] env[62405]: DEBUG nova.compute.utils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1651.766727] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1651.766909] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1651.797899] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "refresh_cache-1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.798349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "refresh_cache-1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.798458] env[62405]: DEBUG nova.network.neutron [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1651.847764] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507004} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.848132] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a9f83357-4898-44ff-a6d8-ea6621453de9/a9f83357-4898-44ff-a6d8-ea6621453de9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1651.848446] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1651.848806] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6558aff-ed5a-407a-b926-6cefc14ef64f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.859844] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1651.859844] env[62405]: value = "task-1947220" [ 1651.859844] env[62405]: _type = "Task" [ 1651.859844] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.861628] env[62405]: DEBUG nova.policy [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ca334510b4445a23dc2fb38215590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a1645e38674042828c78155974f95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1651.874447] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947220, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.090450] env[62405]: INFO nova.compute.manager [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Took 42.28 seconds to build instance. [ 1652.114132] env[62405]: DEBUG nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Successfully created port: 2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1652.250898] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b93071d-3ff8-4b2f-8bce-9ca931316303 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.259096] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Suspending the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1652.259335] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ed06a597-2fbe-430f-831a-1d72ca42c152 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.270496] env[62405]: DEBUG oslo_vmware.api [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1652.270496] env[62405]: value = "task-1947222" [ 1652.270496] env[62405]: _type = "Task" [ 1652.270496] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.271273] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1652.293902] env[62405]: DEBUG oslo_vmware.api [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947222, 'name': SuspendVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.332805] env[62405]: DEBUG nova.network.neutron [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1652.374382] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068015} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.377055] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1652.378735] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063e8a11-39d2-4c55-9066-254e4a2890ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.415032] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] a9f83357-4898-44ff-a6d8-ea6621453de9/a9f83357-4898-44ff-a6d8-ea6621453de9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1652.418738] env[62405]: DEBUG nova.network.neutron [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.419937] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71f096f9-8e89-4b30-a366-04dd5e9ed0f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.435101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "refresh_cache-1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.435514] env[62405]: DEBUG nova.compute.manager [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 
tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1652.435707] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.437035] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bede9814-8546-41f4-9ce7-dc6efac5981a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.446805] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.449338] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Successfully created port: d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1652.453188] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f16aa441-594a-42cc-ab8e-cc9697d51950 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.454723] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1652.454723] env[62405]: value = "task-1947223" [ 1652.454723] env[62405]: _type = "Task" [ 1652.454723] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.464264] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1652.464264] env[62405]: value = "task-1947224" [ 1652.464264] env[62405]: _type = "Task" [ 1652.464264] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.467754] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947223, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.481071] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947224, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.594582] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8e7b1166-1bbf-4b87-ae2b-5812bf58bbe4 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.963s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.784306] env[62405]: DEBUG oslo_vmware.api [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947222, 'name': SuspendVM_Task} progress is 58%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.836474] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba474e8b-cbab-4ec9-9e25-f8c15fd3bf34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.846325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cf9b72-6fb9-456f-8de5-4c1ea6d228a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.881628] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890dab81-b165-4238-9464-b1944c71657d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.890730] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42861ded-be26-4a6e-8e9a-30b3af818a36 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.908427] env[62405]: DEBUG nova.compute.provider_tree [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.968967] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947223, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.981737] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947224, 'name': PowerOffVM_Task, 'duration_secs': 0.304363} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.982086] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.982293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.982588] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8560ad33-d5ee-4796-a7d8-9201ec88694b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.012525] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.012822] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.013111] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleting the datastore file [datastore1] 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.013550] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6974173e-c557-4ba0-8f42-0e2eebe4e953 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.022126] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1653.022126] env[62405]: value = "task-1947226" [ 1653.022126] env[62405]: _type = "Task" [ 1653.022126] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.035439] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947226, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.281855] env[62405]: DEBUG oslo_vmware.api [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947222, 'name': SuspendVM_Task, 'duration_secs': 0.729762} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.281855] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Suspended the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1653.281855] env[62405]: DEBUG nova.compute.manager [None req-a03ec064-d3f0-4b20-9b36-186bcfc10242 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1653.282471] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fafe3f6-d277-4933-9a20-6cb9c7cc8ba9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.295941] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1653.325021] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1653.325021] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1653.325021] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1653.325329] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1653.325329] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 
tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1653.325329] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1653.325329] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1653.325329] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1653.325510] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1653.325510] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1653.325510] env[62405]: DEBUG nova.virt.hardware [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1653.325510] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23262368-2d16-4d20-8370-82144993d500 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.334571] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364d3502-1c49-401f-a3b4-ad22d019915a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.411734] env[62405]: DEBUG nova.scheduler.client.report [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1653.452666] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "c392d6f3-b638-4857-826d-760c38b7d291" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.453229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.453474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "c392d6f3-b638-4857-826d-760c38b7d291-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.453673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.453843] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.456286] env[62405]: INFO nova.compute.manager [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Terminating instance [ 1653.468309] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947223, 'name': ReconfigVM_Task, 'duration_secs': 0.639187} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.468564] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Reconfigured VM instance instance-0000002e to attach disk [datastore1] a9f83357-4898-44ff-a6d8-ea6621453de9/a9f83357-4898-44ff-a6d8-ea6621453de9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1653.469183] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-587381f0-0714-4918-8999-71dd813d97d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.478144] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1653.478144] env[62405]: value = "task-1947227" [ 1653.478144] env[62405]: _type = "Task" [ 1653.478144] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.487020] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947227, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.532289] env[62405]: DEBUG oslo_vmware.api [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101369} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.532594] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1653.532803] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1653.532988] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1653.533211] env[62405]: INFO nova.compute.manager [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Took 1.10 seconds to destroy the instance on the hypervisor. 
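The destroy sequence recorded just above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each followed by `Task: {...} progress is N%` polling frames) is driven through oslo.vmware's session layer. As a rough illustration of that invoke-and-poll pattern only — not Nova's actual vmops code — a minimal sketch follows; the vCenter host, credentials, and the `vm_ref` argument are placeholders, not values taken from this log.

```python
# Illustrative sketch of the invoke-and-poll pattern visible in the log above
# (PowerOffVM_Task / UnregisterVM). Host, credentials and the VM reference are
# placeholders; real Nova code lives in nova/virt/vmwareapi/vm_util.py and vmops.py.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test',              # placeholder vCenter host
    'administrator@vsphere.local',   # placeholder username
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)          # interval behind the "progress is N%" frames

def power_off_and_unregister(vm_ref):
    """Power off a VM and unregister it, mirroring the log's destroy steps."""
    # *_Task calls return a task moref; wait_for_task() polls it until it
    # reaches 'success' (raising on 'error'), which is what produces the
    # _poll_task / "completed successfully" lines in the log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # UnregisterVM is synchronous (no task object), matching the single
    # "Unregistered the VM" line with no progress polling.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
```

Deleting the leftover datastore directory (the DeleteDatastoreFile_Task step in the log) follows the same invoke-and-wait shape through the FileManager managed object.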
[ 1653.533456] env[62405]: DEBUG oslo.service.loopingcall [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.533648] env[62405]: DEBUG nova.compute.manager [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1653.533743] env[62405]: DEBUG nova.network.neutron [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1653.550412] env[62405]: DEBUG nova.network.neutron [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1653.920020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.920020] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1653.926342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.160s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.926342] env[62405]: DEBUG nova.objects.instance [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lazy-loading 'resources' on Instance uuid 3f9849b8-6aaa-4d32-b140-207d5b54d68f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1653.963772] env[62405]: DEBUG nova.compute.manager [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1653.964305] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1653.965534] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2de4511-713d-4a8e-86bb-3108d8e95547 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.978949] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1653.982719] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44df359e-1d02-483d-afcf-ee9fea3aa9a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.992662] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947227, 'name': Rename_Task, 'duration_secs': 0.425683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.993945] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1653.994495] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1653.994495] env[62405]: value = "task-1947228" [ 1653.994495] env[62405]: _type = "Task" [ 1653.994495] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.994717] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-251b263a-b9c3-4a15-a0d7-038ac4f46108 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.005244] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947228, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.006973] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1654.006973] env[62405]: value = "task-1947229" [ 1654.006973] env[62405]: _type = "Task" [ 1654.006973] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.015913] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.055306] env[62405]: DEBUG nova.network.neutron [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.167853] env[62405]: DEBUG nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Successfully updated port: 2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1654.289038] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Successfully updated port: d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1654.432189] env[62405]: DEBUG nova.compute.utils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1654.434691] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1654.434825] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1654.502911] env[62405]: DEBUG nova.policy [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21c1e2cb91f3428eb082da6ff92065fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '024f8c817a3142b983afd4018e025452', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1654.508142] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947228, 'name': PowerOffVM_Task, 'duration_secs': 0.243443} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.513599] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.513823] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.514300] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f947c8c-98c7-4e90-80f2-be9054b71f8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.524025] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947229, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.564722] env[62405]: INFO nova.compute.manager [-] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Took 1.03 seconds to deallocate network for instance. 
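The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited", and "Lock ... released ... held" triplets throughout these records come from oslo.concurrency's in-process locks (the `lock` context manager and `synchronized` decorator frames at lockutils.py:310/313/331 and 402/407/421). The snippet below is a minimal sketch of that pattern with made-up lock names, not code from Nova itself.

```python
# Minimal illustration of the lock lifecycle logged above (acquire -> waited,
# released -> held). Lock names are placeholders, not names from this log.
from oslo_concurrency import lockutils

# Context-manager form: emits the "Acquiring"/"Acquired"/"Releasing" DEBUG
# lines seen in this section when oslo logging is configured at DEBUG level.
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # critical section, e.g. rebuilding an instance's network info cache

# Decorator form, the shape behind the per-instance build/terminate locks
# ("..._locked_do_build_and_run_instance", "do_terminate_instance") above.
@lockutils.synchronized('<instance-uuid>')
def do_terminate_instance():
    pass
```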
[ 1654.672106] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.672298] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.672491] env[62405]: DEBUG nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1654.762309] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1654.762309] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401430', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'name': 'volume-a420f725-234f-4c1b-bcf6-23fd04729838', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '777ddb84-25b9-4da6-be6b-a2289dbf510a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'serial': 'a420f725-234f-4c1b-bcf6-23fd04729838'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1654.762309] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa931de0-fa04-4a6f-844a-1d34c6b4495c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.780612] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3873784e-6ab1-447c-935b-7bf79ec23ba6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.802378] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.802538] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] 
Acquired lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.802727] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1654.813162] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] volume-a420f725-234f-4c1b-bcf6-23fd04729838/volume-a420f725-234f-4c1b-bcf6-23fd04729838.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1654.818170] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0682f9b3-2a04-4164-bc94-13fcc76c0721 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.836461] env[62405]: DEBUG nova.compute.manager [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-vif-plugged-2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1654.836785] env[62405]: DEBUG oslo_concurrency.lockutils [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.837151] env[62405]: DEBUG oslo_concurrency.lockutils [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.837451] env[62405]: DEBUG oslo_concurrency.lockutils [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.837741] env[62405]: DEBUG nova.compute.manager [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] No waiting events found dispatching network-vif-plugged-2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1654.838022] env[62405]: WARNING nova.compute.manager [req-7528ea1d-45e7-4e8e-90fd-276f0cc4aa54 req-dd167a90-4bba-4369-b9f5-858cd165c1f7 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received unexpected event 
network-vif-plugged-2a761bb8-a966-4a87-98b7-183fc71da74b for instance with vm_state active and task_state None. [ 1654.847880] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1654.847880] env[62405]: value = "task-1947231" [ 1654.847880] env[62405]: _type = "Task" [ 1654.847880] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.861303] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947231, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.873227] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Successfully created port: f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1654.926259] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4796ef4c-e60d-42a0-b311-2192ac2f069a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.935476] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266bbd8f-6b29-4b60-9351-ab99b4e3ab91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.939008] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1654.977629] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1047d4ec-ed16-4a1c-a265-3da4df7e6d8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.987089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871f27fe-28ea-4268-b2cd-baa9dac5b658 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.005630] env[62405]: DEBUG nova.compute.provider_tree [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.019833] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947229, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.027880] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.028152] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.032807] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1655.033268] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1655.033357] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Deleting the datastore file [datastore1] c392d6f3-b638-4857-826d-760c38b7d291 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1655.033872] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5f16999-c88f-4792-851b-86840e0a61f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.043190] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for the task: (returnval){ [ 1655.043190] env[62405]: value = "task-1947232" [ 1655.043190] env[62405]: _type = "Task" [ 1655.043190] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.053627] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.071366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.212366] env[62405]: WARNING nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. ignoring it [ 1655.353031] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1655.362119] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947231, 'name': ReconfigVM_Task, 'duration_secs': 0.434923} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.365649] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfigured VM instance instance-00000015 to attach disk [datastore1] volume-a420f725-234f-4c1b-bcf6-23fd04729838/volume-a420f725-234f-4c1b-bcf6-23fd04729838.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1655.372831] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24b7c5dd-232e-4e36-a6b4-23eb1703dc80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.389878] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1655.389878] env[62405]: value = "task-1947233" [ 1655.389878] env[62405]: _type = "Task" [ 1655.389878] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.400085] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947233, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.508351] env[62405]: DEBUG nova.network.neutron [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Updating instance_info_cache with network_info: [{"id": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "address": "fa:16:3e:c1:e4:0a", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd504fb4b-56", "ovs_interfaceid": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.510193] env[62405]: DEBUG nova.scheduler.client.report [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1655.519371] env[62405]: DEBUG nova.network.neutron [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2a761bb8-a966-4a87-98b7-183fc71da74b", "address": "fa:16:3e:f2:11:e7", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a761bb8-a9", "ovs_interfaceid": "2a761bb8-a966-4a87-98b7-183fc71da74b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.524199] env[62405]: DEBUG oslo_vmware.api [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947229, 'name': PowerOnVM_Task, 'duration_secs': 1.155309} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.524686] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1655.524965] env[62405]: INFO nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Took 11.46 seconds to spawn the instance on the hypervisor. 
[ 1655.525096] env[62405]: DEBUG nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1655.526642] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5005e355-c941-43e6-ba30-85cc249fc29c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.531690] env[62405]: DEBUG nova.compute.utils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.552485] env[62405]: DEBUG oslo_vmware.api [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Task: {'id': task-1947232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142603} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.552754] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1655.552936] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1655.553125] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1655.553343] env[62405]: INFO nova.compute.manager [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Took 1.59 seconds to destroy the instance on the hypervisor. [ 1655.553530] env[62405]: DEBUG oslo.service.loopingcall [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.553709] env[62405]: DEBUG nova.compute.manager [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1655.553802] env[62405]: DEBUG nova.network.neutron [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1655.901318] env[62405]: DEBUG oslo_vmware.api [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947233, 'name': ReconfigVM_Task, 'duration_secs': 0.148372} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.901318] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401430', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'name': 'volume-a420f725-234f-4c1b-bcf6-23fd04729838', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '777ddb84-25b9-4da6-be6b-a2289dbf510a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'serial': 'a420f725-234f-4c1b-bcf6-23fd04729838'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1655.947829] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1655.974239] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1655.974499] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1655.974707] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1655.974906] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1655.975064] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1655.975212] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1655.975415] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1655.975572] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1655.975731] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1655.975890] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1655.976067] env[62405]: DEBUG nova.virt.hardware [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1655.976908] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5393c0-89b5-44f5-9251-0ee31259cfa9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.985760] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5baea63-e2f4-4fdf-999f-d86e12422fd6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.014791] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.015104] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance network_info: |[{"id": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "address": "fa:16:3e:c1:e4:0a", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd504fb4b-56", "ovs_interfaceid": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1656.015499] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:e4:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd504fb4b-5637-4d63-aaa3-5273e3b34481', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1656.022802] env[62405]: DEBUG oslo.service.loopingcall [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.023447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.025398] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1656.025883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.088s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.026212] env[62405]: DEBUG nova.objects.instance [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lazy-loading 'resources' on Instance uuid 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1656.027795] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.028379] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.028545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] 
Acquired lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.028745] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ec5c2f1-5647-45db-9662-e0cefef21739 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.048361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef41e1b1-8a4c-4db8-8a61-29898c06b4b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.050732] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.023s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.055418] env[62405]: INFO nova.scheduler.client.report [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Deleted allocations for instance 3f9849b8-6aaa-4d32-b140-207d5b54d68f [ 1656.060570] env[62405]: INFO nova.compute.manager [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Took 41.79 seconds to build instance. [ 1656.066093] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1656.066093] env[62405]: value = "task-1947234" [ 1656.066093] env[62405]: _type = "Task" [ 1656.066093] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.079273] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1656.079568] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.079761] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1656.079999] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.080217] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1656.080399] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1656.080643] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1656.080851] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1656.081048] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies 
{{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1656.081257] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1656.081467] env[62405]: DEBUG nova.virt.hardware [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1656.087867] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfiguring VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1656.094123] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71e7237c-8f96-4fde-804a-5133b360b1c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.114519] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947234, 'name': CreateVM_Task} progress is 15%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.116355] env[62405]: DEBUG oslo_vmware.api [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1656.116355] env[62405]: value = "task-1947235" [ 1656.116355] env[62405]: _type = "Task" [ 1656.116355] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.128568] env[62405]: DEBUG oslo_vmware.api [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947235, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.250174] env[62405]: DEBUG nova.network.neutron [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.478082] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e2acee-95a8-4f2c-90c6-d565892f9cf7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.487429] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c21381a-6d2a-4f91-b46e-9d416b063ca4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.520370] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e8f284-2be1-458d-bf4d-82ecf05e149f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.528791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8d57c6-dcbd-4dde-8ca9-aa0e7cf9f323 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.543177] env[62405]: DEBUG nova.compute.provider_tree [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1656.565056] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b2d6de8-885e-4cde-ac31-4756fbc755d0 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.163s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.565764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2830b08-1efc-4547-9610-56ed95521a7e tempest-ServersTestBootFromVolume-1900402084 tempest-ServersTestBootFromVolume-1900402084-project-member] Lock "3f9849b8-6aaa-4d32-b140-207d5b54d68f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.245s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.590198] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947234, 'name': CreateVM_Task, 'duration_secs': 0.413429} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.590429] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1656.591247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.591405] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.591621] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1656.591883] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47bf107f-96c0-4d68-b6bb-484b798a4433 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.601855] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1656.601855] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a1c61-8f4a-0892-e8c8-127bc4d36c61" [ 1656.601855] env[62405]: _type = "Task" [ 1656.601855] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.615132] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a1c61-8f4a-0892-e8c8-127bc4d36c61, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.615553] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.615899] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1656.615972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.616126] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.616317] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1656.616856] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4045c0bd-a534-4891-b7d3-4d7dc34b9e9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.630213] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1656.630451] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1656.634793] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a82f7dd-cb65-4e4a-bda5-67639d986b68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.638131] env[62405]: DEBUG oslo_vmware.api [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.643363] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1656.643363] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0d7cd-6747-bd90-a778-e816115ae60c" [ 1656.643363] env[62405]: _type = "Task" [ 1656.643363] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.653176] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0d7cd-6747-bd90-a778-e816115ae60c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.681095] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Successfully updated port: f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1656.753125] env[62405]: INFO nova.compute.manager [-] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Took 1.20 seconds to deallocate network for instance. [ 1656.951006] env[62405]: DEBUG nova.objects.instance [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lazy-loading 'flavor' on Instance uuid 777ddb84-25b9-4da6-be6b-a2289dbf510a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1657.065315] env[62405]: ERROR nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] [req-b0bfc6cd-27db-409f-850e-d087d97b9e21] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b0bfc6cd-27db-409f-850e-d087d97b9e21"}]} [ 1657.082042] env[62405]: DEBUG nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1657.097099] env[62405]: DEBUG nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1657.097362] env[62405]: DEBUG nova.compute.provider_tree [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.103117] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.103360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.103588] env[62405]: INFO nova.compute.manager [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Attaching volume dd61f776-0703-457f-8823-3fc5792787a0 to /dev/sdb [ 1657.111009] env[62405]: DEBUG nova.scheduler.client.report [None 
req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1657.130173] env[62405]: DEBUG oslo_vmware.api [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947235, 'name': ReconfigVM_Task, 'duration_secs': 0.658403} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.131089] env[62405]: DEBUG nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1657.133426] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.134108] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfigured VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1657.139814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c551f76f-f418-489b-9cec-318ee3f83778 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.148820] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75c50cf-8c34-4b41-a65a-244400f68095 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.160367] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e0d7cd-6747-bd90-a778-e816115ae60c, 'name': SearchDatastore_Task, 'duration_secs': 0.010357} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.161101] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed23d98-0896-4eeb-b8b9-d0a4954317cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.166678] env[62405]: DEBUG nova.virt.block_device [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updating existing volume attachment record: fe8052e8-3f24-44e6-857b-9c019c5c33f9 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1657.174266] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1657.174266] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5290a632-9105-311a-02f0-18fc5ada7410" [ 1657.174266] env[62405]: _type = "Task" [ 1657.174266] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.184034] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.184153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.184197] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.185331] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5290a632-9105-311a-02f0-18fc5ada7410, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.260770] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.459191] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0f951b3-4b7b-4f68-89cd-afb0e4e347fd tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.847s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.513977] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17000dba-ae65-4595-b67e-ba684cd1d6ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.522657] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab2db85-262a-4600-98f2-4ac45395080d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.554569] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fd1a18-c59b-484c-9cf1-cb559ba442a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.562614] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7980543e-8733-4992-8f6d-e85402a52cff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.576321] env[62405]: DEBUG nova.compute.provider_tree [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.638945] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87095df7-2197-4f90-a2d9-59af74b670a8 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.241s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.685711] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': 
session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5290a632-9105-311a-02f0-18fc5ada7410, 'name': SearchDatastore_Task, 'duration_secs': 0.010592} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.687955] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.688260] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1657.688874] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af8fd1ec-2825-4d4c-b377-e2220dc1ac1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.697857] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1657.697857] env[62405]: value = "task-1947239" [ 1657.697857] env[62405]: _type = "Task" [ 1657.697857] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.707928] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.722428] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1657.882795] env[62405]: DEBUG nova.network.neutron [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.136482] env[62405]: DEBUG nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 74 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1658.136784] env[62405]: DEBUG nova.compute.provider_tree [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 74 to 75 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1658.137478] env[62405]: DEBUG nova.compute.provider_tree [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1658.210305] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501169} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.210609] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1658.210907] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1658.211208] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c6f8c9b-e335-4921-97cf-f3e9f60dcd7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.218503] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1658.218503] env[62405]: value = "task-1947240" [ 1658.218503] env[62405]: _type = "Task" [ 1658.218503] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.227717] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947240, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.386730] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.386730] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Instance network_info: |[{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1658.386965] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:4c:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6fffc80-6395-4f72-8a63-b037918502c8', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1658.395381] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Creating folder: Project (024f8c817a3142b983afd4018e025452). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1658.397114] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58c30308-50d6-4539-b7fa-fe4cf9edb0db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.401167] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-changed-2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1658.401386] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing instance network info cache due to event network-changed-2a761bb8-a966-4a87-98b7-183fc71da74b. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1658.401628] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.401801] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.401988] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Refreshing network info cache for port 2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1658.416360] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Created folder: Project (024f8c817a3142b983afd4018e025452) in parent group-v401284. [ 1658.416602] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Creating folder: Instances. Parent ref: group-v401434. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1658.416880] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49332ab6-5968-43ec-9726-07b42701b83c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.428680] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Created folder: Instances in parent group-v401434. 
[ 1658.428943] env[62405]: DEBUG oslo.service.loopingcall [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.429156] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1658.429691] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3228d4ca-1c9d-4fed-958d-a22a9f5eb9d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.450521] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1658.450521] env[62405]: value = "task-1947243" [ 1658.450521] env[62405]: _type = "Task" [ 1658.450521] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.459541] env[62405]: DEBUG nova.compute.manager [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1658.459807] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947243, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.460547] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec71af6a-f137-438c-8759-5f8a01f5acb9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.645685] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.620s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.648095] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.337s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.650027] env[62405]: INFO nova.compute.claims [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1658.678932] env[62405]: INFO nova.scheduler.client.report [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Deleted allocations for instance 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8 [ 1658.729878] env[62405]: DEBUG oslo_vmware.api [None 
req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079046} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.730506] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1658.730938] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4e72da-3e20-41be-a617-c7d37bc26eda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.756260] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1658.756798] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38ef1b62-dc27-4f40-9d88-236d71c529cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.778413] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1658.778413] env[62405]: value = "task-1947244" [ 1658.778413] env[62405]: _type = "Task" [ 1658.778413] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.791379] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947244, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.968205] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947243, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.971701] env[62405]: INFO nova.compute.manager [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] instance snapshotting [ 1658.972138] env[62405]: WARNING nova.compute.manager [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1658.975113] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de564796-d40e-4464-b0f3-d4b65246abcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.999191] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c34544e-5963-448a-ac4e-c9e32763f26c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.188438] env[62405]: DEBUG oslo_concurrency.lockutils [None req-584b1867-8b26-47e3-90b1-4fdfb2fd4e54 tempest-ServerAddressesNegativeTestJSON-1675117428 tempest-ServerAddressesNegativeTestJSON-1675117428-project-member] Lock "3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.675s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.223285] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updated VIF entry in instance network info cache for port 2a761bb8-a966-4a87-98b7-183fc71da74b. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1659.223724] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2a761bb8-a966-4a87-98b7-183fc71da74b", "address": "fa:16:3e:f2:11:e7", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a761bb8-a9", "ovs_interfaceid": "2a761bb8-a966-4a87-98b7-183fc71da74b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.299023] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947244, 'name': ReconfigVM_Task, 'duration_secs': 0.307059} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.299023] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Reconfigured VM instance instance-0000002f to attach disk [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1659.299023] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b17f2cab-cbe2-4b14-964a-6341c2fbe355 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.309228] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1659.309228] env[62405]: value = "task-1947245" [ 1659.309228] env[62405]: _type = "Task" [ 1659.309228] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.319738] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947245, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.385654] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e62851b-ea63-48e9-b597-61e497155d42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.397654] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Suspending the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1659.398036] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3659297f-b99f-4b2c-9f89-1dd764bacbf8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.406835] env[62405]: DEBUG oslo_vmware.api [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] Waiting for the task: (returnval){ [ 1659.406835] env[62405]: value = "task-1947246" [ 1659.406835] env[62405]: _type = "Task" [ 1659.406835] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.417523] env[62405]: DEBUG oslo_vmware.api [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] Task: {'id': task-1947246, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.464742] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947243, 'name': CreateVM_Task, 'duration_secs': 0.77304} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.464952] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1659.465772] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.465992] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.466417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1659.467084] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831083f0-6f40-4d9c-bfad-e39f4e7fdb38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.473916] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1659.473916] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfc112-841b-257c-2eb6-f004c3c57203" [ 1659.473916] env[62405]: _type = "Task" [ 1659.473916] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.485323] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfc112-841b-257c-2eb6-f004c3c57203, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.518941] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1659.519507] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0bc33515-1b73-4fb8-948d-23e14119ec8e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.528742] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1659.528742] env[62405]: value = "task-1947248" [ 1659.528742] env[62405]: _type = "Task" [ 1659.528742] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.539029] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947248, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.727383] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.728808] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Received event network-vif-plugged-d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1659.728808] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquiring lock "a6a0e918-425d-44de-a22b-8779e9108533-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.728808] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Lock "a6a0e918-425d-44de-a22b-8779e9108533-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.728808] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Lock "a6a0e918-425d-44de-a22b-8779e9108533-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.728808] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f 
req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] No waiting events found dispatching network-vif-plugged-d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1659.728808] env[62405]: WARNING nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Received unexpected event network-vif-plugged-d504fb4b-5637-4d63-aaa3-5273e3b34481 for instance with vm_state building and task_state spawning. [ 1659.729121] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Received event network-changed-d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1659.729121] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Refreshing instance network info cache due to event network-changed-d504fb4b-5637-4d63-aaa3-5273e3b34481. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1659.729318] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquiring lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.729407] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquired lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.729529] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Refreshing network info cache for port d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1659.786567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.786870] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.822850] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947245, 'name': Rename_Task, 'duration_secs': 
0.173117} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.823488] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1659.823888] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89777c7e-924d-4d4b-9965-a3350d7b6edf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.835029] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1659.835029] env[62405]: value = "task-1947249" [ 1659.835029] env[62405]: _type = "Task" [ 1659.835029] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.853619] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.884184] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.884184] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.884987] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.884987] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.884987] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c 
tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.888187] env[62405]: INFO nova.compute.manager [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Terminating instance [ 1659.921772] env[62405]: DEBUG oslo_vmware.api [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] Task: {'id': task-1947246, 'name': SuspendVM_Task} progress is 62%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.992112] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfc112-841b-257c-2eb6-f004c3c57203, 'name': SearchDatastore_Task, 'duration_secs': 0.012533} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.992932] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.993782] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1659.994075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.994247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.994437] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1659.998128] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffcd08fe-82db-4b98-bf66-5a5e60723d9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.017844] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.018163] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.022844] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d99994f-c122-426e-8836-2cabdf42f529 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.031846] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1660.031846] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5272be9c-37c5-6b63-f3ae-953987323cad" [ 1660.031846] env[62405]: _type = "Task" [ 1660.031846] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.049029] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5272be9c-37c5-6b63-f3ae-953987323cad, 'name': SearchDatastore_Task, 'duration_secs': 0.010743} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.054373] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947248, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.056023] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feccadea-bc49-4bff-9b5b-597a2d4a48fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.062390] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1660.062390] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524ba977-b881-67fa-8990-d68e8ee708f1" [ 1660.062390] env[62405]: _type = "Task" [ 1660.062390] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.076393] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524ba977-b881-67fa-8990-d68e8ee708f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.226438] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5dd0743-8b03-4160-8586-fb5f9983ffe0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.240231] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0b5205-99a9-451e-864b-ffb464cd495c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.283413] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4b23e4-d19b-487f-9771-bebc947b607f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.290670] env[62405]: INFO nova.compute.manager [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Detaching volume a420f725-234f-4c1b-bcf6-23fd04729838 [ 1660.301976] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cb56a2-a11f-4325-9e3b-a5a6bcdbf518 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.327389] env[62405]: DEBUG nova.compute.provider_tree [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.348437] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947249, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.354335] env[62405]: INFO nova.virt.block_device [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Attempting to driver detach volume a420f725-234f-4c1b-bcf6-23fd04729838 from mountpoint /dev/sdb [ 1660.354606] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1660.354879] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401430', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'name': 'volume-a420f725-234f-4c1b-bcf6-23fd04729838', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '777ddb84-25b9-4da6-be6b-a2289dbf510a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'serial': 'a420f725-234f-4c1b-bcf6-23fd04729838'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1660.355808] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f68d66-96ef-4c46-8181-0bbb7097da63 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.381077] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2da6c5-40a4-4530-b18c-eb91322a7e34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.392823] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb028f3-293d-498d-8159-9bb5885a75a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.397377] env[62405]: DEBUG nova.compute.manager [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1660.397377] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1660.397377] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aac2855-856c-4566-bbd1-38dc1413ed0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.421699] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1660.422540] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05a3e483-ffcf-4315-b4c2-891f14820a2b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.427776] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80054b0-b470-446c-858f-750893ebfde3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.437676] env[62405]: DEBUG oslo_vmware.api [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] Task: {'id': task-1947246, 'name': SuspendVM_Task, 'duration_secs': 0.783043} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.454771] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Suspended the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1660.454771] env[62405]: DEBUG nova.compute.manager [None req-570fbdb1-2554-4246-9c59-e87cf1f4ae30 tempest-ServersAdminNegativeTestJSON-1781016140 tempest-ServersAdminNegativeTestJSON-1781016140-project-admin] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1660.454771] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1660.454771] env[62405]: value = "task-1947250" [ 1660.454771] env[62405]: _type = "Task" [ 1660.454771] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.454771] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] The volume has not been displaced from its original location: [datastore1] volume-a420f725-234f-4c1b-bcf6-23fd04729838/volume-a420f725-234f-4c1b-bcf6-23fd04729838.vmdk. No consolidation needed. {{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1660.458303] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfiguring VM instance instance-00000015 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1660.459187] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99eb0e05-16e6-41b3-9e85-6905ea523ab9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.461903] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44f4c622-c3d5-4e7f-a9c5-7d963b7440bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.494784] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1660.494784] env[62405]: value = "task-1947251" [ 1660.494784] env[62405]: _type = "Task" [ 1660.494784] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.495180] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.507478] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947251, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.520876] env[62405]: DEBUG nova.compute.manager [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1660.542241] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947248, 'name': CreateSnapshot_Task, 'duration_secs': 0.796973} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.542608] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1660.543544] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca302ce-ee47-49e1-8b16-a6cccf6efc59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.578587] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524ba977-b881-67fa-8990-d68e8ee708f1, 'name': SearchDatastore_Task, 'duration_secs': 0.02176} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.578969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.579265] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/6213702e-8e39-4342-b62f-2c9495017bf9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1660.580388] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a94ccfd-f85f-4f42-888f-e771688b9105 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.589815] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1660.589815] env[62405]: value = "task-1947252" [ 1660.589815] env[62405]: _type = "Task" [ 1660.589815] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.600643] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947252, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.793803] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Updated VIF entry in instance network info cache for port d504fb4b-5637-4d63-aaa3-5273e3b34481. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1660.794238] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Updating instance_info_cache with network_info: [{"id": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "address": "fa:16:3e:c1:e4:0a", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd504fb4b-56", "ovs_interfaceid": "d504fb4b-5637-4d63-aaa3-5273e3b34481", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.813521] env[62405]: INFO nova.compute.manager [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Rebuilding instance [ 1660.835482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "f8c6f99f-499f-4886-aae9-5f08969175f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.835699] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.835935] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.839082] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.839139] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.844770] env[62405]: INFO nova.compute.manager [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Terminating instance [ 1660.867355] env[62405]: ERROR nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [req-eba36cca-5b65-4b78-9b92-5cf7aad000b1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eba36cca-5b65-4b78-9b92-5cf7aad000b1"}]} [ 1660.877539] env[62405]: DEBUG oslo_vmware.api [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947249, 'name': PowerOnVM_Task, 'duration_secs': 0.557792} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.877539] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1660.878526] env[62405]: INFO nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Took 7.58 seconds to spawn the instance on the hypervisor. 
[ 1660.878806] env[62405]: DEBUG nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1660.879929] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433744dc-9eaf-46d7-8db6-a3b411100df7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.896057] env[62405]: DEBUG nova.compute.manager [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1660.896868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828d8018-09fc-43fe-b65a-7de740d5da79 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.902020] env[62405]: DEBUG nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1660.923535] env[62405]: DEBUG nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1660.923834] env[62405]: DEBUG nova.compute.provider_tree [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.937540] env[62405]: DEBUG nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 
1660.971250] env[62405]: DEBUG nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1660.976215] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947250, 'name': PowerOffVM_Task, 'duration_secs': 0.184518} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.976490] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1660.976657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1660.976900] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d7429e4-0697-4fac-9fd1-74dc8c42cbc4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.012554] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947251, 'name': ReconfigVM_Task, 'duration_secs': 0.257686} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.015561] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Reconfigured VM instance instance-00000015 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1661.020906] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdf0a294-d4dc-4def-936c-1a145ad3eff4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.049632] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1661.049632] env[62405]: value = "task-1947254" [ 1661.049632] env[62405]: _type = "Task" [ 1661.049632] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.054401] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.063785] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1661.065255] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65bc4a71-6b8a-4f6e-b74f-a101a5aeeeb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.072216] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947254, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.075458] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1661.075458] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1661.075458] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleting the datastore file [datastore1] 0491dc4b-cf35-4035-aca9-baf43b86af7e {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1661.075458] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b92866f6-0a9c-4f94-8db7-e3d47c2db492 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.083031] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1661.083031] env[62405]: value = "task-1947255" [ 1661.083031] env[62405]: _type = "Task" [ 1661.083031] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.088857] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for the task: (returnval){ [ 1661.088857] env[62405]: value = "task-1947256" [ 1661.088857] env[62405]: _type = "Task" [ 1661.088857] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.100453] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947255, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.105781] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947256, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.112431] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947252, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.299892] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Releasing lock "refresh_cache-a6a0e918-425d-44de-a22b-8779e9108533" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.300458] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Received event network-vif-deleted-e766daac-fbcb-489e-aef5-d97530246eb0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1661.300790] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-vif-plugged-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1661.301134] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquiring lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.301459] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.301793] env[62405]: 
DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.302094] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] No waiting events found dispatching network-vif-plugged-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1661.302397] env[62405]: WARNING nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received unexpected event network-vif-plugged-f6fffc80-6395-4f72-8a63-b037918502c8 for instance with vm_state building and task_state spawning. [ 1661.302689] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1661.302952] env[62405]: DEBUG nova.compute.manager [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing instance network info cache due to event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1661.303281] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.303536] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.303871] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1661.368789] env[62405]: DEBUG nova.compute.manager [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1661.369020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1661.373889] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c65eb4a-f34b-4ff2-bb3e-7a436c6e9455 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.385318] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.385613] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca46e549-5eeb-4a62-b6fe-daf488d4846d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.395112] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1661.395112] env[62405]: value = "task-1947257" [ 1661.395112] env[62405]: _type = "Task" [ 1661.395112] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.410310] env[62405]: INFO nova.compute.manager [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Took 40.33 seconds to build instance. [ 1661.427883] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1947257, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.565904] env[62405]: DEBUG oslo_vmware.api [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947254, 'name': ReconfigVM_Task, 'duration_secs': 0.394933} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.566320] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401430', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'name': 'volume-a420f725-234f-4c1b-bcf6-23fd04729838', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '777ddb84-25b9-4da6-be6b-a2289dbf510a', 'attached_at': '', 'detached_at': '', 'volume_id': 'a420f725-234f-4c1b-bcf6-23fd04729838', 'serial': 'a420f725-234f-4c1b-bcf6-23fd04729838'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1661.601755] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947255, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.607680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da62e053-799e-4a55-8ebe-42400d6f4853 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.614197] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947256, 'name': DeleteDatastoreFile_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.623550] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9d8e27-6e29-433a-be85-382b96e25a33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.628126] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911238} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.628427] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/6213702e-8e39-4342-b62f-2c9495017bf9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1661.628683] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1661.629660] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8b0173b-91e7-44ee-9599-e974ff898bd8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.666147] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5419cbb-55f3-4eba-a9d0-5f64698459f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.669322] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1661.669322] env[62405]: value = "task-1947258" [ 1661.669322] env[62405]: _type = "Task" [ 1661.669322] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.676965] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef92f689-3e88-4818-b390-1ee08e52c471 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.687714] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947258, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.698566] env[62405]: DEBUG nova.compute.provider_tree [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1661.717174] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1661.717410] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401433', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'name': 'volume-dd61f776-0703-457f-8823-3fc5792787a0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67bf25ea-5774-4246-a3e6-2aeb0ebf6731', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'serial': 'dd61f776-0703-457f-8823-3fc5792787a0'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1661.718303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8535ae-8d8e-4679-b6ee-5268a42c0d3c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.736782] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad9ab8b-04a6-49e9-912c-86b94a415677 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.763876] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-dd61f776-0703-457f-8823-3fc5792787a0/volume-dd61f776-0703-457f-8823-3fc5792787a0.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1661.763876] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6878cfe7-5297-4625-96f5-8ba03b15667f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.782884] env[62405]: DEBUG oslo_vmware.api [None 
req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1661.782884] env[62405]: value = "task-1947259" [ 1661.782884] env[62405]: _type = "Task" [ 1661.782884] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.792365] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947259, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.915066] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1947257, 'name': PowerOffVM_Task, 'duration_secs': 0.26831} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.915339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5974aa0f-586f-47ba-95a5-8e34a83e9507 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.467s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.915411] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1661.915585] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1661.915943] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eaa39854-b895-4f49-a01b-d8835a108ef7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.922040] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1661.922040] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-983cbbaa-4a92-4e03-99b2-0d15ba566594 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.930710] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1661.930710] env[62405]: value = 
"task-1947261" [ 1661.930710] env[62405]: _type = "Task" [ 1661.930710] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.938671] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.025864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-2a761bb8-a966-4a87-98b7-183fc71da74b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.026145] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-2a761bb8-a966-4a87-98b7-183fc71da74b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.036499] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.036925] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.037073] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Deleting the datastore file [datastore1] f8c6f99f-499f-4886-aae9-5f08969175f6 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.037309] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0dec4f6-07cf-414d-bca9-607541ce1b65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.050243] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for the task: (returnval){ [ 1662.050243] env[62405]: value = "task-1947262" [ 1662.050243] env[62405]: _type = "Task" [ 1662.050243] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.061413] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1947262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.096745] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947255, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.103127] env[62405]: DEBUG oslo_vmware.api [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Task: {'id': task-1947256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529387} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.103127] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1662.103512] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1662.103512] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1662.103512] env[62405]: INFO nova.compute.manager [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1662.103749] env[62405]: DEBUG oslo.service.loopingcall [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.103982] env[62405]: DEBUG nova.compute.manager [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1662.104123] env[62405]: DEBUG nova.network.neutron [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1662.136442] env[62405]: DEBUG nova.objects.instance [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lazy-loading 'flavor' on Instance uuid 777ddb84-25b9-4da6-be6b-a2289dbf510a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.193997] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114047} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.197743] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1662.198627] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8de6ea-0cf8-4091-abf0-4530ed2c3399 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.228386] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/6213702e-8e39-4342-b62f-2c9495017bf9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1662.229587] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-128f2654-e639-47c4-b18e-d3dd08543bfe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.245839] env[62405]: DEBUG nova.scheduler.client.report [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1662.246158] env[62405]: DEBUG nova.compute.provider_tree [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 77 to 78 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1662.246382] env[62405]: DEBUG nova.compute.provider_tree [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1662.254081] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1662.254081] env[62405]: value = "task-1947263" [ 1662.254081] env[62405]: _type = "Task" [ 1662.254081] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.263224] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947263, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.305656] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947259, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.314815] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updated VIF entry in instance network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1662.315315] env[62405]: DEBUG nova.network.neutron [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.441726] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947261, 'name': PowerOffVM_Task, 'duration_secs': 0.267214} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.442071] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1662.442782] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1662.443150] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5331734-25de-44a0-b0dd-289f10eacc4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.452729] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1662.452729] env[62405]: value = "task-1947264" [ 1662.452729] env[62405]: _type = "Task" [ 1662.452729] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.465241] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947264, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.529232] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.529472] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.530439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113a862d-7cdf-45ca-841f-74b28778b04f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.556071] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b29073-a9ae-49b4-ab68-f9c115246070 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.564696] env[62405]: DEBUG oslo_vmware.api [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Task: {'id': task-1947262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280414} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.580867] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1662.581137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1662.581292] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1662.582382] env[62405]: INFO nova.compute.manager [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1662.582702] env[62405]: DEBUG oslo.service.loopingcall [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.589271] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfiguring VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1662.589271] env[62405]: DEBUG nova.compute.manager [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1662.589271] env[62405]: DEBUG nova.network.neutron [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1662.591203] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17fd0889-7ceb-457b-b47e-e863d5a9128a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.619952] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947255, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.625027] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1662.625027] env[62405]: value = "task-1947265" [ 1662.625027] env[62405]: _type = "Task" [ 1662.625027] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.632293] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.752887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.105s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.753623] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1662.756577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.287s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.756878] env[62405]: DEBUG nova.objects.instance [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lazy-loading 'resources' on Instance uuid b21dc1e7-dacd-4154-9bc3-0fa3774695a8 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1662.768893] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947263, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.794320] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947259, 'name': ReconfigVM_Task, 'duration_secs': 0.591193} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.794554] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-dd61f776-0703-457f-8823-3fc5792787a0/volume-dd61f776-0703-457f-8823-3fc5792787a0.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1662.799732] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28d27cb2-1bcb-4255-be09-ab781c7f2da3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.815908] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1662.815908] env[62405]: value = "task-1947266" [ 1662.815908] env[62405]: _type = "Task" [ 1662.815908] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.819880] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b8320a2-0a76-4c94-9e51-f1cd2b4bf03f req-d9bd710e-a5ff-4bd2-afc8-4803f462eb30 service nova] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.826237] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.963114] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1662.963461] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1662.963618] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401333', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'name': 'volume-2c06d022-a782-4194-9dee-348bf3888516', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9', 'attached_at': '', 'detached_at': '', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'serial': '2c06d022-a782-4194-9dee-348bf3888516'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1662.967164] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9622fb-f324-4448-9f77-3347f40b9218 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.986268] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f012a164-538a-45d2-b70a-29f49bb4b430 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.999230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c444c4e-5d19-4759-b591-11bd84985eed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.023085] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16025c98-d4f6-4454-999e-0b75222ba3e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.043246] 
env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] The volume has not been displaced from its original location: [datastore1] volume-2c06d022-a782-4194-9dee-348bf3888516/volume-2c06d022-a782-4194-9dee-348bf3888516.vmdk. No consolidation needed. {{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1663.049948] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Reconfiguring VM instance instance-0000001c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1663.050383] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-177b82b4-1496-4783-b5fa-fba63a1f3d9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.072423] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1663.072423] env[62405]: value = "task-1947267" [ 1663.072423] env[62405]: _type = "Task" [ 1663.072423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.081867] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.102236] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947255, 'name': CloneVM_Task, 'duration_secs': 1.703307} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.102589] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Created linked-clone VM from snapshot [ 1663.103484] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3296295e-b03a-4c64-bcc9-0dd4ce849e19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.113219] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Uploading image 7eeaa30f-03d7-4843-a1e3-469c7b5b30d5 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1663.135460] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.143996] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1663.143996] env[62405]: value = "vm-401438" [ 1663.143996] env[62405]: _type = "VirtualMachine" [ 1663.143996] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1663.145282] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1a24fa03-73e1-478e-aeb2-5c20984033b2 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.358s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.145620] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9993e89c-3f24-4a27-b449-e137aa4c9d55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.155311] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease: (returnval){ [ 1663.155311] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522170e7-4712-aa73-99fc-3d07de816df4" [ 1663.155311] env[62405]: _type = "HttpNfcLease" [ 1663.155311] env[62405]: } obtained for exporting VM: (result){ [ 1663.155311] env[62405]: value = "vm-401438" [ 1663.155311] env[62405]: _type = "VirtualMachine" [ 1663.155311] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1663.155618] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the lease: (returnval){ [ 1663.155618] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522170e7-4712-aa73-99fc-3d07de816df4" [ 1663.155618] env[62405]: _type = "HttpNfcLease" [ 1663.155618] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1663.162654] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1663.162654] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522170e7-4712-aa73-99fc-3d07de816df4" [ 1663.162654] env[62405]: _type = "HttpNfcLease" [ 1663.162654] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1663.260039] env[62405]: DEBUG nova.compute.utils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1663.275995] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1663.278142] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1663.288240] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947263, 'name': ReconfigVM_Task, 'duration_secs': 0.54224} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.288668] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/6213702e-8e39-4342-b62f-2c9495017bf9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.289311] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33207961-710c-4017-9081-8946814b992d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.298785] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1663.298785] env[62405]: value = "task-1947269" [ 1663.298785] env[62405]: _type = "Task" [ 1663.298785] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.315781] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947269, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.339058] env[62405]: DEBUG oslo_vmware.api [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947266, 'name': ReconfigVM_Task, 'duration_secs': 0.17031} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.339601] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401433', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'name': 'volume-dd61f776-0703-457f-8823-3fc5792787a0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67bf25ea-5774-4246-a3e6-2aeb0ebf6731', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'serial': 'dd61f776-0703-457f-8823-3fc5792787a0'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1663.342773] env[62405]: DEBUG nova.network.neutron [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.354375] env[62405]: DEBUG nova.policy [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef6c85db897e497a943a0816727c0066', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3b6dadf205e4d11b64ea03fac66c712', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1663.414753] env[62405]: DEBUG nova.compute.manager [req-2c08b19f-accf-4aec-84d1-cfd18f260fba req-e9a891eb-cb1e-43b6-8c68-0e61100d4350 service nova] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Received event network-vif-deleted-19538d37-e369-4f7b-8051-61d2c0a7fb00 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1663.585139] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947267, 'name': ReconfigVM_Task, 'duration_secs': 0.283768} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.585415] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Reconfigured VM instance instance-0000001c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1663.590693] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d635877-262f-4446-855b-57998411f7f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.611216] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1663.611216] env[62405]: value = "task-1947270" [ 1663.611216] env[62405]: _type = "Task" [ 1663.611216] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.624861] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.638008] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.670293] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1663.670293] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522170e7-4712-aa73-99fc-3d07de816df4" [ 1663.670293] env[62405]: _type = "HttpNfcLease" [ 1663.670293] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1663.672020] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1663.672020] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522170e7-4712-aa73-99fc-3d07de816df4" [ 1663.672020] env[62405]: _type = "HttpNfcLease" [ 1663.672020] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1663.672020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3c5ee6-d05b-4fc6-8397-d51eaebe91f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.686147] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk from lease info. 
{{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1663.686147] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1663.774173] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1663.795866] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2f1927ce-50e7-410e-ad20-719acba52922 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.816601] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947269, 'name': Rename_Task, 'duration_secs': 0.15723} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.819020] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1663.819020] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bbf3eaa-5c04-496b-8afd-2fa6b2c53ce0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.832881] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1663.832881] env[62405]: value = "task-1947271" [ 1663.832881] env[62405]: _type = "Task" [ 1663.832881] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.848325] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947271, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.848690] env[62405]: INFO nova.compute.manager [-] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Took 1.74 seconds to deallocate network for instance. 
[ 1663.936298] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93643ddd-7114-4a91-85ec-9e9d399b8a4f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.947293] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0e85d6-a6a4-4771-92a3-dd0d3fc5c4d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.984396] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Successfully created port: e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1663.989434] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae266c75-8325-4f5f-b58b-6496d95f130b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.000886] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86afb1c2-26a0-4b2c-b00c-104c56e204f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.021223] env[62405]: DEBUG nova.compute.provider_tree [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1664.128389] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947270, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.145456] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.285112] env[62405]: DEBUG nova.network.neutron [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.346641] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947271, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.360141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.412239] env[62405]: DEBUG nova.objects.instance [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1664.527531] env[62405]: DEBUG nova.scheduler.client.report [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1664.630080] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947270, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.641434] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.791839] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1664.794567] env[62405]: INFO nova.compute.manager [-] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Took 2.21 seconds to deallocate network for instance. 
[ 1664.838627] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1664.838798] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1664.838946] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1664.839616] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1664.839844] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1664.840048] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1664.840508] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1664.840508] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1664.840600] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 
tempest-ServerMetadataTestJSON-671122645-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1664.840774] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1664.842939] env[62405]: DEBUG nova.virt.hardware [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1664.843966] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213be82d-c837-4ec1-af40-331ec9c6d302 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.864768] env[62405]: DEBUG oslo_vmware.api [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947271, 'name': PowerOnVM_Task, 'duration_secs': 0.521573} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.865260] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1664.865815] env[62405]: INFO nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 1664.865815] env[62405]: DEBUG nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1664.867255] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dfa3cf-80e2-45a8-9a53-98de508c2188 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.874773] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad149fe-7cfd-4f61-b2ae-c67856633cfd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.918794] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fedbb7f2-75ea-41de-b4e7-8164335aee2d tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.815s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.032547] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.276s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.035300] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.600s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.036865] env[62405]: INFO nova.compute.claims [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1665.069982] env[62405]: INFO nova.scheduler.client.report [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleted allocations for instance b21dc1e7-dacd-4154-9bc3-0fa3774695a8 [ 1665.125804] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947270, 'name': ReconfigVM_Task, 'duration_secs': 1.208148} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.129020] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401333', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'name': 'volume-2c06d022-a782-4194-9dee-348bf3888516', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9', 'attached_at': '', 'detached_at': '', 'volume_id': '2c06d022-a782-4194-9dee-348bf3888516', 'serial': '2c06d022-a782-4194-9dee-348bf3888516'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1665.129020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1665.129020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56e471f-3823-4bf6-808d-5719d7264827 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.142708] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1665.143384] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.143384] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29daf9f3-1a98-4b97-a82b-518879800d04 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.274631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "aae3abca-951a-4149-9ccb-d70bea218aea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.275506] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.303764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.307618] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1665.307994] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1665.308230] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Deleting the datastore file [datastore1] fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1665.308587] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c8d88e6-067c-4830-a80c-c6ea5a2cce65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.317190] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for the task: (returnval){ [ 1665.317190] env[62405]: value = "task-1947273" [ 1665.317190] env[62405]: _type = "Task" [ 1665.317190] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.333826] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.406970] env[62405]: INFO nova.compute.manager [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Took 42.90 seconds to build instance. [ 1665.584215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4569a7b-d444-4557-8de7-7b81f13bbb46 tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "b21dc1e7-dacd-4154-9bc3-0fa3774695a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.136s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.643330] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.778809] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1665.833845] env[62405]: DEBUG oslo_vmware.api [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Task: {'id': task-1947273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175914} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.833845] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.833845] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1665.833845] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1665.887761] env[62405]: DEBUG nova.compute.manager [req-5c499502-1375-4f37-9fbc-bef3a86968a2 req-7dd2da90-94b2-4af3-b5f0-f8720431bab5 service nova] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Received event network-vif-deleted-1b9bb4b7-58d5-4182-ad5b-0a10e3a34546 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1665.911648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a944432c-c654-4374-8c71-9d5cfbbfc9cb tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.189s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.933228] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1665.934158] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb864a5d-58ac-4450-8ea1-db0344cf5836 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.948691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c7065-31f1-48ff-9791-563b26a362d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.992333] env[62405]: ERROR nova.compute.manager [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Failed to detach volume 2c06d022-a782-4194-9dee-348bf3888516 from /dev/sda: nova.exception.InstanceNotFound: Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 could not be found. 
[ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Traceback (most recent call last): [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self.driver.rebuild(**kwargs) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise NotImplementedError() [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] NotImplementedError [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] During handling of the above exception, another exception occurred: [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Traceback (most recent call last): [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self.driver.detach_volume(context, old_connection_info, [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] return self._volumeops.detach_volume(connection_info, instance) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._detach_volume_vmdk(connection_info, instance) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] stable_ref.fetch_moref(session) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] 
nova.exception.InstanceNotFound: Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 could not be found. [ 1665.992333] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.158989] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.242521] env[62405]: DEBUG nova.compute.utils [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Build of instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 aborted: Failed to rebuild volume backed instance. {{(pid=62405) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1666.246512] env[62405]: ERROR nova.compute.manager [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 aborted: Failed to rebuild volume backed instance. [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Traceback (most recent call last): [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self.driver.rebuild(**kwargs) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise NotImplementedError() [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] NotImplementedError [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] During handling of the above exception, another exception occurred: [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Traceback (most recent call last): [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._detach_root_volume(context, instance, root_bdm) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] with excutils.save_and_reraise_exception(): [ 1666.246512] env[62405]: 
ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self.force_reraise() [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise self.value [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self.driver.detach_volume(context, old_connection_info, [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] return self._volumeops.detach_volume(connection_info, instance) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._detach_volume_vmdk(connection_info, instance) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] stable_ref.fetch_moref(session) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] nova.exception.InstanceNotFound: Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 could not be found. 
[ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] During handling of the above exception, another exception occurred: [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Traceback (most recent call last): [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 11383, in _error_out_instance_on_exception [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] yield [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1666.246512] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._do_rebuild_instance_with_claim( [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._do_rebuild_instance( [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._rebuild_default_impl(**kwargs) [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] self._rebuild_volume_backed_instance( [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] raise exception.BuildAbortException( [ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] nova.exception.BuildAbortException: Build of instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 aborted: Failed to rebuild volume backed instance. 
[ 1666.247827] env[62405]: ERROR nova.compute.manager [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] [ 1666.310599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.327144] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Successfully updated port: e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.608272] env[62405]: INFO nova.compute.manager [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Rebuilding instance [ 1666.670318] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.677150] env[62405]: DEBUG nova.compute.manager [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1666.677656] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b2abff-59c0-4663-a516-279fa869c027 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.698202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c895588-d2b1-43cc-8e1d-3ad6e6b5df80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.713585] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd8a2eb-420a-438a-95d1-3281b916966c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.774086] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3334ea39-2446-4d96-a31c-be3666ef7ac9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.784944] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c69fc6e-ddb1-41f5-9cc8-ef43cd69298d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.804601] env[62405]: DEBUG nova.compute.provider_tree [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 
{{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.833404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.833847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquired lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.833847] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.142214] env[62405]: INFO nova.compute.manager [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Rescuing [ 1667.142515] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.142767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.142923] env[62405]: DEBUG nova.network.neutron [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.157513] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.309478] env[62405]: DEBUG nova.scheduler.client.report [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1667.384021] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1667.659840] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.685673] env[62405]: DEBUG nova.network.neutron [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Updating instance_info_cache with network_info: [{"id": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "address": "fa:16:3e:d5:1c:86", "network": {"id": "d33e0185-2b73-4ceb-9fb9-434f6a6e7a70", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-422171323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b6dadf205e4d11b64ea03fac66c712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape57e57ca-cb", "ovs_interfaceid": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.711780] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1667.711780] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f280e251-709f-4687-be18-518e3674dfe4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.718870] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1667.718870] env[62405]: value = "task-1947274" [ 1667.718870] env[62405]: _type = "Task" [ 1667.718870] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.729072] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947274, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.819027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.781s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.819027] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1667.820303] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.388s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.823285] env[62405]: INFO nova.compute.claims [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1668.000586] env[62405]: DEBUG nova.network.neutron [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": 
"024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.028050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "2c623c00-92f2-4cc4-8503-963c3308d708" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.028050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.157016] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.189050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Releasing lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.189430] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Instance network_info: |[{"id": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "address": "fa:16:3e:d5:1c:86", "network": {"id": "d33e0185-2b73-4ceb-9fb9-434f6a6e7a70", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-422171323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b6dadf205e4d11b64ea03fac66c712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape57e57ca-cb", "ovs_interfaceid": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1668.189962] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:1c:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e57e57ca-cb20-4bcb-bdee-5c96e246e949', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.198070] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Creating folder: Project (f3b6dadf205e4d11b64ea03fac66c712). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1668.198070] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22e0f041-ecd3-430d-979f-0eb5ff43201e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.213570] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Created folder: Project (f3b6dadf205e4d11b64ea03fac66c712) in parent group-v401284. [ 1668.213836] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Creating folder: Instances. Parent ref: group-v401439. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1668.214101] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c986ae3-b93f-4b1f-a21c-364d3b7177b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.227917] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Created folder: Instances in parent group-v401439. [ 1668.228328] env[62405]: DEBUG oslo.service.loopingcall [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.233102] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1668.233102] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947274, 'name': PowerOffVM_Task, 'duration_secs': 0.256353} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.233102] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef8342fd-579a-4197-8e96-c44aecd60fbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.259045] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1668.259394] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1668.260502] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d501256-341b-4ec2-9474-3237f96feb83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.271940] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1668.272388] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc5c068f-ee01-4f0c-a23e-a32f672d62ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.275646] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.275646] env[62405]: value = "task-1947277" [ 1668.275646] env[62405]: _type = "Task" [ 1668.275646] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.284156] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.287826] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947277, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.329837] env[62405]: DEBUG nova.compute.utils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1668.335142] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1668.335675] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1668.416673] env[62405]: DEBUG nova.compute.manager [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Received event network-vif-plugged-e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1668.416945] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Acquiring lock "377365a4-7538-4bab-a181-1940e6fb4066-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.417262] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Lock "377365a4-7538-4bab-a181-1940e6fb4066-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.417515] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Lock "377365a4-7538-4bab-a181-1940e6fb4066-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.417671] env[62405]: DEBUG nova.compute.manager [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] No waiting events found dispatching network-vif-plugged-e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1668.417902] env[62405]: WARNING nova.compute.manager [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Received unexpected event network-vif-plugged-e57e57ca-cb20-4bcb-bdee-5c96e246e949 for instance with vm_state building and task_state spawning. [ 1668.417970] env[62405]: DEBUG nova.compute.manager [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Received event network-changed-e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1668.418155] env[62405]: DEBUG nova.compute.manager [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Refreshing instance network info cache due to event network-changed-e57e57ca-cb20-4bcb-bdee-5c96e246e949. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1668.418358] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Acquiring lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.418773] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Acquired lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.418773] env[62405]: DEBUG nova.network.neutron [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Refreshing network info cache for port e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.424637] env[62405]: DEBUG nova.policy [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1668.473465] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1668.473704] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1668.473930] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] a6a0e918-425d-44de-a22b-8779e9108533 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.474275] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42196733-d1c6-4e33-9a32-c9de98a11905 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.489460] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1668.489460] env[62405]: value = "task-1947279" [ 1668.489460] env[62405]: _type = 
"Task" [ 1668.489460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.500702] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947279, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.503430] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.531362] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1668.658412] env[62405]: DEBUG oslo_vmware.api [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947265, 'name': ReconfigVM_Task, 'duration_secs': 5.81915} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.658724] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.658992] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Reconfigured VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1668.674661] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.675634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.787414] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947277, 'name': CreateVM_Task, 'duration_secs': 0.500225} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.787630] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1668.789010] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.789224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.789585] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1668.789870] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be752e0-a20f-470f-a9a5-9fc9e8c80b42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.796129] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1668.796129] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f797f4-214c-7c89-6305-8ffabdb3b091" [ 1668.796129] env[62405]: _type = "Task" [ 1668.796129] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.805642] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f797f4-214c-7c89-6305-8ffabdb3b091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.833059] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1669.004064] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265006} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.004793] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.005098] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1669.005417] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1669.060012] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.180073] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1669.238638] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Successfully created port: 44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.277787] env[62405]: DEBUG nova.network.neutron [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Updated VIF entry in instance network info cache for port e57e57ca-cb20-4bcb-bdee-5c96e246e949. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.277937] env[62405]: DEBUG nova.network.neutron [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Updating instance_info_cache with network_info: [{"id": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "address": "fa:16:3e:d5:1c:86", "network": {"id": "d33e0185-2b73-4ceb-9fb9-434f6a6e7a70", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-422171323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b6dadf205e4d11b64ea03fac66c712", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape57e57ca-cb", "ovs_interfaceid": "e57e57ca-cb20-4bcb-bdee-5c96e246e949", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.312332] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f797f4-214c-7c89-6305-8ffabdb3b091, 'name': SearchDatastore_Task, 'duration_secs': 0.015762} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.312923] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.313229] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1669.313780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.313780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.313780] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1669.314187] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87374bb3-edfe-46f3-baa1-c975ae75b388 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.325262] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1669.325370] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1669.326143] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0bec16-e44e-4018-8c7d-5be514521976 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.336683] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1669.336683] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ab8ae3-3104-6880-4b2e-a1ed4c28a416" [ 1669.336683] env[62405]: _type = "Task" [ 1669.336683] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.351950] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ab8ae3-3104-6880-4b2e-a1ed4c28a416, 'name': SearchDatastore_Task, 'duration_secs': 0.012488} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.352825] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29e1c561-2f6c-4198-8db2-6855137693bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.359335] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1669.359335] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a5f2f-2d00-f18e-3a3f-5009e8f58e12" [ 1669.359335] env[62405]: _type = "Task" [ 1669.359335] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.372164] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a5f2f-2d00-f18e-3a3f-5009e8f58e12, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.382670] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e17c38-45de-45ce-b78f-e46c42707d55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.392064] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b20bd09-85fc-4059-a916-bbb94c5b39f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.425565] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c7dc6f-b97f-4f09-af9a-d2b63703ee20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.434287] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247b2cef-f3b4-4f7a-9728-f73627a0f4d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.452204] env[62405]: DEBUG nova.compute.provider_tree [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1669.542245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.542245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.542245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.542245] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.542245] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.543083] env[62405]: INFO nova.compute.manager [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Terminating instance [ 1669.706030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.766894] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.766894] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.767121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.768085] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.768085] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.769977] env[62405]: INFO nova.compute.manager [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 
tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Terminating instance [ 1669.784201] env[62405]: DEBUG oslo_concurrency.lockutils [req-9f661986-81ce-41c1-8477-10ff0ab9e7b3 req-8ab04302-417c-4b45-bd13-019ec70be37a service nova] Releasing lock "refresh_cache-377365a4-7538-4bab-a181-1940e6fb4066" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.846929] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1669.872552] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a5f2f-2d00-f18e-3a3f-5009e8f58e12, 'name': SearchDatastore_Task, 'duration_secs': 0.012585} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.873837] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.874502] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 377365a4-7538-4bab-a181-1940e6fb4066/377365a4-7538-4bab-a181-1940e6fb4066.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1669.875050] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67ec2cb6-bd46-4fef-9e8f-d42815443fd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.882249] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), 
allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1669.882249] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1669.882249] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1669.882249] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1669.882249] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1669.882641] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1669.883055] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1669.883383] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1669.883701] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1669.884035] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1669.884370] env[62405]: DEBUG nova.virt.hardware [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1669.885669] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c94654-fc20-4b34-82c2-d19fd13a9689 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.892283] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1669.892283] env[62405]: value = "task-1947280" [ 1669.892283] env[62405]: _type = "Task" [ 1669.892283] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.902564] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79df510a-5782-4717-8ce0-d60f0f870cd3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.910716] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.959751] env[62405]: DEBUG nova.scheduler.client.report [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1670.049028] env[62405]: DEBUG nova.compute.manager [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1670.049261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1670.050210] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3564e69-1f1d-41a2-ac0c-c9be24ee0300 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.053591] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.054548] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa386e71-ff1e-41aa-ab71-612db0603132 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.064344] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.065989] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f067e5d9-ef80-4556-b9d8-49fc3c34578b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.067618] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1670.067618] env[62405]: value = "task-1947281" [ 1670.067618] env[62405]: _type = "Task" [ 1670.067618] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.074147] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1670.074416] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1670.074595] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1670.074803] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1670.074960] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1670.075220] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1670.075583] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1670.075914] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1670.075980] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1670.076376] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1670.076376] env[62405]: DEBUG nova.virt.hardware [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1670.077439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c84e044-6e27-4719-b498-a812faa3269b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.081571] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1670.081571] env[62405]: value = "task-1947282" [ 1670.081571] env[62405]: _type = "Task" [ 1670.081571] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.094599] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.097015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f5c13f-ff40-4bfb-a341-08844d515080 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.106861] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1947282, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.120072] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:e4:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd504fb4b-5637-4d63-aaa3-5273e3b34481', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1670.128796] env[62405]: DEBUG oslo.service.loopingcall [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.129269] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1670.129557] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-239b6567-e527-479a-97af-190da656f89f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.152843] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1670.152843] env[62405]: value = "task-1947283" [ 1670.152843] env[62405]: _type = "Task" [ 1670.152843] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.163528] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947283, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.222262] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.222572] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.275068] env[62405]: DEBUG nova.compute.manager [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1670.275541] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ee82efa-7302-4f9c-9187-e31d7820dab2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.289589] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143b84a0-4e6e-4b7b-8790-915d0af4a0fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.340032] env[62405]: WARNING nova.virt.vmwareapi.driver [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 could not be found. [ 1670.340032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1670.340032] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1185b95-5e41-4be2-a7c2-dad153807989 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.349756] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdb496c-58cb-48c2-ac1c-0f505a11bc3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.393437] env[62405]: WARNING nova.virt.vmwareapi.vmops [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 could not be found. [ 1670.393583] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1670.393845] env[62405]: INFO nova.compute.manager [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Took 0.12 seconds to destroy the instance on the hypervisor. [ 1670.394089] env[62405]: DEBUG oslo.service.loopingcall [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.395173] env[62405]: DEBUG nova.compute.manager [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1670.395313] env[62405]: DEBUG nova.network.neutron [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1670.409511] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947280, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.470410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.471060] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1670.474548] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.772s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.476634] env[62405]: INFO nova.compute.claims [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1670.501296] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.502167] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.502167] env[62405]: DEBUG nova.network.neutron [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Building network info 
cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.579856] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947281, 'name': PowerOffVM_Task, 'duration_secs': 0.35524} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.580543] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1670.581482] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a10d853-d2b0-4b98-8c7b-3323c83db8a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.608404] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e631829d-5bff-4efe-b942-407986f699be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.611617] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1947282, 'name': PowerOffVM_Task, 'duration_secs': 0.283806} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.611885] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1670.612064] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1670.612800] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7feeb3fe-b364-47c0-92c6-eefb57818d56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.645677] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.646559] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36c054d6-8af9-4ade-b687-6d4302e78342 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.659466] env[62405]: DEBUG oslo_vmware.api [None 
req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1670.659466] env[62405]: value = "task-1947285" [ 1670.659466] env[62405]: _type = "Task" [ 1670.659466] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.668523] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947283, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.675405] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1670.675739] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1670.676048] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.676183] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.676553] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.677203] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7621d186-aa47-4cc9-8b2e-9a3e8276516c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.688858] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1670.689071] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 
tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1670.689892] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4c17858-77a7-47f5-9806-a1e508db8628 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.696270] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1670.696270] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f51da-1773-977e-f904-fc9c159f0af8" [ 1670.696270] env[62405]: _type = "Task" [ 1670.696270] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.708676] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f51da-1773-977e-f904-fc9c159f0af8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.742889] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.742889] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1670.744411] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1670.745282] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1670.745282] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Deleting the datastore file [datastore1] 9b71f962-2b92-4f7b-bb8d-b50da5130018 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1670.745282] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a399c8a-99bc-4bda-ab53-88eb2a09fff5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.754984] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 
tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for the task: (returnval){ [ 1670.754984] env[62405]: value = "task-1947286" [ 1670.754984] env[62405]: _type = "Task" [ 1670.754984] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.765347] env[62405]: DEBUG nova.compute.manager [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-vif-deleted-2a761bb8-a966-4a87-98b7-183fc71da74b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1670.765579] env[62405]: INFO nova.compute.manager [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Neutron deleted interface 2a761bb8-a966-4a87-98b7-183fc71da74b; detaching it from the instance and deleting it from the info cache [ 1670.765883] env[62405]: DEBUG nova.network.neutron [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.770767] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1947286, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.871973] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "a9f83357-4898-44ff-a6d8-ea6621453de9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.872382] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.872628] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.873319] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.873978] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.875896] env[62405]: INFO nova.compute.manager [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Terminating instance [ 1670.905873] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601529} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.906274] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 377365a4-7538-4bab-a181-1940e6fb4066/377365a4-7538-4bab-a181-1940e6fb4066.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1670.906529] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1670.906824] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55e9eb7f-1468-4b27-86a0-a2fe604a493e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.914547] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1670.914547] env[62405]: value = "task-1947287" [ 1670.914547] env[62405]: _type = "Task" [ 1670.914547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.926254] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947287, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.982200] env[62405]: DEBUG nova.compute.utils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1670.985475] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1670.985475] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1671.035019] env[62405]: DEBUG nova.policy [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ae271e171d54bf4b1af909e68d3e449', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '915d6ea5e5184efab9fbeda21e3b8a64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1671.167796] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947283, 'name': CreateVM_Task, 'duration_secs': 0.622652} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.167973] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1671.168770] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.168940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.171947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1671.172085] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-332f9aab-4b51-4cbd-bec0-9349d4bd22b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.178571] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1671.178571] env[62405]: value = 
"session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbf124-6d22-4fe8-8454-28fffdb75319" [ 1671.178571] env[62405]: _type = "Task" [ 1671.178571] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.190847] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbf124-6d22-4fe8-8454-28fffdb75319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.206888] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525f51da-1773-977e-f904-fc9c159f0af8, 'name': SearchDatastore_Task, 'duration_secs': 0.023437} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.207765] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1ed64e4-9f4b-4134-8abe-7648ef583b57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.215052] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1671.215052] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f717c1-c38c-ba90-f986-2742c237fd89" [ 1671.215052] env[62405]: _type = "Task" [ 1671.215052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.226618] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f717c1-c38c-ba90-f986-2742c237fd89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.240681] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.240681] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1671.240681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1671.274765] env[62405]: DEBUG oslo_concurrency.lockutils [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.275036] env[62405]: DEBUG oslo_concurrency.lockutils [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] Acquired lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.275375] env[62405]: DEBUG oslo_vmware.api [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Task: {'id': task-1947286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183829} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.276423] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9672507-8be0-47f4-ad79-c15c70db6480 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.280525] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1671.280869] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1671.281197] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1671.281498] env[62405]: INFO nova.compute.manager [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1671.281917] env[62405]: DEBUG oslo.service.loopingcall [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1671.282883] env[62405]: DEBUG nova.compute.manager [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1671.282883] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1671.301551] env[62405]: DEBUG oslo_concurrency.lockutils [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] Releasing lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.301832] env[62405]: WARNING nova.compute.manager [req-82dbf392-fd25-4b89-be16-e2ca02b2af4c req-baac7cce-a8be-46d2-a517-9545f454e624 service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Detach interface failed, port_id=2a761bb8-a966-4a87-98b7-183fc71da74b, reason: No device with interface-id 2a761bb8-a966-4a87-98b7-183fc71da74b exists on VM: nova.exception.NotFound: No device with interface-id 2a761bb8-a966-4a87-98b7-183fc71da74b exists on VM [ 1671.380569] env[62405]: DEBUG nova.compute.manager [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1671.380887] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.381709] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1610445d-4333-4196-844f-e59d6a026557 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.390859] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1671.391160] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa2919bf-6711-4290-a92e-5a7a1788f77d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.425252] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077859} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.428491] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1671.428793] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1a5819-15e7-451a-8ed8-b8a6d33e7144 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.453319] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 377365a4-7538-4bab-a181-1940e6fb4066/377365a4-7538-4bab-a181-1940e6fb4066.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1671.453818] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.454129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.454351] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.454513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.454642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.456549] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e4cebe8-d739-426a-860c-a83a00c897ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.473639] env[62405]: INFO nova.compute.manager [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Terminating instance [ 1671.477206] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1671.477470] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1671.477701] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleting the datastore file [datastore1] a9f83357-4898-44ff-a6d8-ea6621453de9 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.479378] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-856dbaea-7e54-4cf2-9347-7f398c764fad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.485644] env[62405]: DEBUG nova.compute.utils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1671.487298] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1671.487298] env[62405]: value = "task-1947289" [ 1671.487298] env[62405]: _type = "Task" [ 1671.487298] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.495408] env[62405]: DEBUG oslo_vmware.api [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1671.495408] env[62405]: value = "task-1947290" [ 1671.495408] env[62405]: _type = "Task" [ 1671.495408] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.505404] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947289, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.511716] env[62405]: DEBUG oslo_vmware.api [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.671320] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Successfully created port: 6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.694099] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbf124-6d22-4fe8-8454-28fffdb75319, 'name': SearchDatastore_Task, 'duration_secs': 0.011798} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.694415] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.694715] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1671.694929] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.731882] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f717c1-c38c-ba90-f986-2742c237fd89, 'name': SearchDatastore_Task, 'duration_secs': 0.012346} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.732159] env[62405]: DEBUG oslo_concurrency.lockutils [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.732420] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1671.732702] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.732920] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1671.733158] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58cadca9-448a-48c5-9f8c-df065e568b08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.738513] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9630488a-5583-4a44-b6e6-ac8f4207af85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.745585] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.747532] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1671.747532] env[62405]: value = "task-1947291" [ 1671.747532] env[62405]: _type = "Task" [ 1671.747532] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.751746] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1671.752107] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1671.752665] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e53126f-3f1f-48d7-8be5-496449745c81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.757548] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Successfully updated port: 44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1671.767993] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1671.767993] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c94a3d-920d-a2fa-4054-0abf43b28f77" [ 1671.767993] env[62405]: _type = "Task" [ 1671.767993] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.768199] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.769775] env[62405]: INFO nova.network.neutron [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Port 2a761bb8-a966-4a87-98b7-183fc71da74b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1671.770161] env[62405]: DEBUG nova.network.neutron [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [{"id": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "address": "fa:16:3e:59:60:92", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap666e898c-75", "ovs_interfaceid": "666e898c-754c-4b07-b0d9-dac2a9a5bc6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.790141] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c94a3d-920d-a2fa-4054-0abf43b28f77, 'name': SearchDatastore_Task, 'duration_secs': 0.014175} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.790542] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61d41d11-0eb1-47b1-a4ae-8eee654b132a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.798756] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1671.798756] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bbcfa-d85d-45c1-8572-3e65c1382a6d" [ 1671.798756] env[62405]: _type = "Task" [ 1671.798756] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.814750] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bbcfa-d85d-45c1-8572-3e65c1382a6d, 'name': SearchDatastore_Task, 'duration_secs': 0.013531} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.815122] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.815508] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1671.816240] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c81c809e-5ae1-4d89-9239-086921087966 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.829046] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1671.829046] env[62405]: value = "task-1947292" [ 1671.829046] env[62405]: _type = "Task" [ 1671.829046] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.841188] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947292, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.986506] env[62405]: DEBUG nova.compute.manager [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1671.986966] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.988880] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a3d969-2386-4ac9-b249-0bfe5ddaae79 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.993267] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1672.012507] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1672.013617] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e25bcf60-5adc-434d-85e3-d06c263dc81a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.019699] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947289, 'name': ReconfigVM_Task, 'duration_secs': 0.404531} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.025200] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 377365a4-7538-4bab-a181-1940e6fb4066/377365a4-7538-4bab-a181-1940e6fb4066.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1672.025602] env[62405]: DEBUG oslo_vmware.api [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232783} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.025908] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5550bb4c-8edf-443f-9c10-0586df0d20d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.028459] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1672.028673] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1672.028875] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1672.029091] env[62405]: INFO nova.compute.manager [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1672.029383] env[62405]: DEBUG oslo.service.loopingcall [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.032858] env[62405]: DEBUG nova.compute.manager [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1672.032858] env[62405]: DEBUG nova.network.neutron [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1672.037344] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1672.037344] env[62405]: value = "task-1947293" [ 1672.037344] env[62405]: _type = "Task" [ 1672.037344] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.045021] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1672.045021] env[62405]: value = "task-1947294" [ 1672.045021] env[62405]: _type = "Task" [ 1672.045021] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.049691] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947293, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.061132] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947294, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.138167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdf2a64-4b61-46de-b27e-7b9a3e5e4222 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.154490] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0283c405-8d89-4f2e-b667-49d33ad34848 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.193689] env[62405]: DEBUG nova.network.neutron [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.198979] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adf354d-4bbb-4380-ab03-545c686f4dc0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.212160] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ee8507-6e39-4294-b069-283c73443f41 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.234935] env[62405]: DEBUG nova.compute.provider_tree [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.252084] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1672.254594] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa19211-46ea-4efd-993a-426df35fe590 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.266526] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1672.266722] env[62405]: ERROR oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk due to incomplete transfer. [ 1672.271024] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-87dd7c9a-775b-440e-8d48-3fbf0a03928e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.272527] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.272659] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.272812] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1672.274208] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947291, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.282852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-23748dfd-7c60-41db-8acb-7b49cf1c27db" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.286889] env[62405]: DEBUG oslo_vmware.rw_handles [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52df19e7-1156-ce96-ea69-058aea306fd7/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1672.287073] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Uploaded image 7eeaa30f-03d7-4843-a1e3-469c7b5b30d5 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1672.289496] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1672.291565] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d3871598-4d58-4a8f-8fb4-0fb1722dea86 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.309907] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1672.309907] env[62405]: value = "task-1947295" [ 1672.309907] env[62405]: _type = "Task" [ 1672.309907] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.324405] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947295, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.341565] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947292, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.551938] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947293, 'name': PowerOffVM_Task, 'duration_secs': 0.351755} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.557228] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1672.557683] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1672.558728] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23cb63eb-135a-4458-9b6b-c9f669f76f8b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.571521] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947294, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.696272] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1672.696642] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1672.696878] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleting the datastore file [datastore1] 23748dfd-7c60-41db-8acb-7b49cf1c27db {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1672.697439] env[62405]: INFO nova.compute.manager [-] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Took 2.30 seconds to deallocate network for instance. [ 1672.697730] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-621bddcd-7d67-43e4-aa9f-cc6f63f08d51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.709125] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1672.709125] env[62405]: value = "task-1947297" [ 1672.709125] env[62405]: _type = "Task" [ 1672.709125] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.720449] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.737943] env[62405]: DEBUG nova.scheduler.client.report [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1672.767223] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600294} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.768300] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. 
[ 1672.770050] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d46967-7c24-45b7-be23-3247efdcbf50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.799234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18ca5fbd-bc81-43c5-8f75-59042c3ac786 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-23748dfd-7c60-41db-8acb-7b49cf1c27db-2a761bb8-a966-4a87-98b7-183fc71da74b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.772s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.807520] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1672.808410] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-222783bf-52ee-4ac6-9629-fe52ba5045c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.831268] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947295, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.836397] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1672.836397] env[62405]: value = "task-1947298" [ 1672.836397] env[62405]: _type = "Task" [ 1672.836397] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.844409] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.846426} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.845316] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1672.845443] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1672.845901] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f3e92af-e450-44f7-a873-306cf628b5a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.851513] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947298, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.858251] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1672.858251] env[62405]: value = "task-1947299" [ 1672.858251] env[62405]: _type = "Task" [ 1672.858251] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.871065] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.892015] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1672.899699] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.012180] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1673.039121] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:21:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1719892512',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1127811900',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1673.039451] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.039524] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1673.039970] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.039970] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1673.039970] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1673.040403] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1673.040947] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 
tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1673.040947] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1673.041041] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1673.041194] env[62405]: DEBUG nova.virt.hardware [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1673.042054] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0b1a30-184b-4cd2-810e-05c532b2454e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.051564] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72de4822-af13-4f1a-9b30-1297fd766442 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.074069] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947294, 'name': Rename_Task, 'duration_secs': 0.570945} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.074689] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1673.074962] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec82bc58-0e5b-462f-87db-9da74f917449 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.084111] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1673.084111] env[62405]: value = "task-1947300" [ 1673.084111] env[62405]: _type = "Task" [ 1673.084111] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.092426] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947300, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.112053] env[62405]: DEBUG nova.network.neutron [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.148025] env[62405]: DEBUG nova.network.neutron [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Updating instance_info_cache with network_info: [{"id": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "address": "fa:16:3e:08:ac:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44befb6d-08", "ovs_interfaceid": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.173662] env[62405]: DEBUG nova.compute.manager [req-25413d79-cccf-4d27-9f6b-f92b639728ee req-34f73ac1-617d-42ea-accc-8cc0fc73b52c service nova] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Received event network-vif-deleted-0805ecfc-d6ef-4bff-a3a1-0f8af74e57a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1673.220252] env[62405]: DEBUG oslo_vmware.api [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202659} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.220436] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.220565] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1673.221860] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1673.221860] env[62405]: INFO nova.compute.manager [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1673.221860] env[62405]: DEBUG oslo.service.loopingcall [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.221860] env[62405]: DEBUG nova.compute.manager [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1673.221860] env[62405]: DEBUG nova.network.neutron [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1673.246215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.246215] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1673.251616] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.454s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.251846] env[62405]: DEBUG nova.objects.instance [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1673.262286] env[62405]: INFO nova.compute.manager [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Took 0.56 seconds to detach 1 volumes for instance. [ 1673.265042] env[62405]: DEBUG nova.compute.manager [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Deleting volume: 2c06d022-a782-4194-9dee-348bf3888516 {{(pid=62405) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1673.335127] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947295, 'name': Destroy_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.346764] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947298, 'name': ReconfigVM_Task, 'duration_secs': 0.374868} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.347298] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1673.348184] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374ea87d-79fe-4123-8064-6d0c2e4b7cbb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.385359] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb4a0db5-f0de-44c8-95e0-803e15f73e56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.403491] env[62405]: INFO nova.compute.manager [-] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Took 2.12 seconds to deallocate network for instance. [ 1673.403856] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074145} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.411194] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1673.411754] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1673.411754] env[62405]: value = "task-1947302" [ 1673.411754] env[62405]: _type = "Task" [ 1673.411754] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.414081] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c48e58a-59db-41bc-a361-a0ebf423f9af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.446336] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1673.452828] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e08cdf3a-a09c-4350-920e-94f8adff420a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.466859] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947302, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.475586] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1673.475586] env[62405]: value = "task-1947303" [ 1673.475586] env[62405]: _type = "Task" [ 1673.475586] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.483089] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.483351] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.490695] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947303, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.516150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.516406] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.596434] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947300, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.616773] env[62405]: INFO nova.compute.manager [-] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Took 1.58 seconds to deallocate network for instance. [ 1673.641706] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Received event network-vif-plugged-44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1673.642778] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Acquiring lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.644071] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.644494] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.644494] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] No waiting events found dispatching network-vif-plugged-44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1673.644671] env[62405]: 
WARNING nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Received unexpected event network-vif-plugged-44befb6d-082c-47e0-9834-f3c7dc3d3210 for instance with vm_state building and task_state spawning. [ 1673.644864] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Received event network-vif-deleted-b5922da6-f3d2-478a-8756-ea7020186366 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1673.645063] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Received event network-changed-44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1673.645220] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Refreshing instance network info cache due to event network-changed-44befb6d-082c-47e0-9834-f3c7dc3d3210. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1673.645389] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Acquiring lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.649639] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.650856] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Instance network_info: |[{"id": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "address": "fa:16:3e:08:ac:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44befb6d-08", "ovs_interfaceid": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1673.650856] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Acquired lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.650856] env[62405]: DEBUG nova.network.neutron [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Refreshing network info cache for port 44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1673.651586] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:ac:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44befb6d-082c-47e0-9834-f3c7dc3d3210', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.659248] env[62405]: DEBUG oslo.service.loopingcall [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.660527] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.660767] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edd74bb2-8eef-4707-925a-f9df49e030b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.683531] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.683531] env[62405]: value = "task-1947304" [ 1673.683531] env[62405]: _type = "Task" [ 1673.683531] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.698685] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947304, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.753017] env[62405]: DEBUG nova.compute.utils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1673.756034] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1673.756293] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1673.825548] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.836284] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947295, 'name': Destroy_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.858819] env[62405]: DEBUG nova.policy [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1673.875371] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Successfully updated port: 6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1673.914127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.932921] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947302, 'name': ReconfigVM_Task, 'duration_secs': 0.171772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.933288] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1673.934014] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2e2338d-3a58-4140-870f-2f6294695470 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.946095] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1673.946095] env[62405]: value = "task-1947305" [ 1673.946095] env[62405]: _type = "Task" [ 1673.946095] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.959459] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.989446] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1673.990379] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947303, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.018940] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1674.095115] env[62405]: DEBUG oslo_vmware.api [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947300, 'name': PowerOnVM_Task, 'duration_secs': 0.519582} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.095414] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1674.095624] env[62405]: INFO nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Took 9.30 seconds to spawn the instance on the hypervisor. [ 1674.096125] env[62405]: DEBUG nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1674.097011] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b17aa9-08e2-4450-8d46-8e3b71d88c76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.128242] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.195232] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947304, 'name': CreateVM_Task, 'duration_secs': 0.395428} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.195409] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1674.196154] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.196339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.196671] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.196923] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8a381ff-bd2c-4731-9963-6cbd90ec22ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.203916] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1674.203916] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f4047-cd77-95f4-3442-a4d29f2125d6" [ 1674.203916] env[62405]: _type = "Task" [ 1674.203916] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.213017] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f4047-cd77-95f4-3442-a4d29f2125d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.264440] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1674.269303] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a89d3de-d2c4-42d4-b2ca-8e873a196d16 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.270525] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.596s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.270794] env[62405]: DEBUG nova.objects.instance [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lazy-loading 'resources' on Instance uuid 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.339798] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947295, 'name': Destroy_Task, 'duration_secs': 1.619933} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.340130] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Destroyed the VM [ 1674.340391] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1674.340917] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-eb0768e5-f761-4767-bafa-c3be46586792 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.350625] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1674.350625] env[62405]: value = "task-1947306" [ 1674.350625] env[62405]: _type = "Task" [ 1674.350625] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.363778] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947306, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.376377] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.376540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.376700] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1674.460110] env[62405]: DEBUG oslo_vmware.api [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947305, 'name': PowerOnVM_Task, 'duration_secs': 0.502116} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.461032] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Successfully created port: 262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1674.462872] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1674.466084] env[62405]: DEBUG nova.compute.manager [None req-393d79bc-b5e4-4999-9705-c3faff616f4f tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1674.470247] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea7f14d-54bf-4c0d-822f-0b391c2f5335 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.494029] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947303, 'name': ReconfigVM_Task, 'duration_secs': 0.913773} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.494319] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Reconfigured VM instance instance-0000002f to attach disk [datastore1] a6a0e918-425d-44de-a22b-8779e9108533/a6a0e918-425d-44de-a22b-8779e9108533.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1674.496027] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8563ccd4-c67a-4c26-a5f0-aa14a5d53c9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.509138] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1674.509138] env[62405]: value = "task-1947307" [ 1674.509138] env[62405]: _type = "Task" [ 1674.509138] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.523338] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947307, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.527690] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.544113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.595153] env[62405]: DEBUG nova.network.neutron [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Updated VIF entry in instance network info cache for port 44befb6d-082c-47e0-9834-f3c7dc3d3210. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1674.595153] env[62405]: DEBUG nova.network.neutron [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Updating instance_info_cache with network_info: [{"id": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "address": "fa:16:3e:08:ac:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44befb6d-08", "ovs_interfaceid": "44befb6d-082c-47e0-9834-f3c7dc3d3210", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.617582] env[62405]: INFO nova.compute.manager [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Took 43.34 seconds to build instance. [ 1674.716559] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f4047-cd77-95f4-3442-a4d29f2125d6, 'name': SearchDatastore_Task, 'duration_secs': 0.014372} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.716832] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.717086] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.717335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.717540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.717676] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.717923] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e321268-d415-474d-bbca-dee0e3cd448c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.727953] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.728320] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.729874] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d094d91b-ab01-43fa-98da-a2b108a52919 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.736514] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1674.736514] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525dbbee-f9dd-6048-18d1-80c1ddbd0983" [ 1674.736514] env[62405]: _type = "Task" [ 1674.736514] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.746379] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525dbbee-f9dd-6048-18d1-80c1ddbd0983, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.763293] env[62405]: DEBUG nova.network.neutron [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.864774] env[62405]: DEBUG oslo_vmware.api [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947306, 'name': RemoveSnapshot_Task, 'duration_secs': 0.475337} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.865130] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1674.865400] env[62405]: INFO nova.compute.manager [None req-966cba6a-1b89-4866-bb58-34ddcb775da9 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Took 15.89 seconds to snapshot the instance on the hypervisor. [ 1674.937817] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1675.021817] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947307, 'name': Rename_Task, 'duration_secs': 0.151875} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.024556] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1675.025254] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9511c1ba-4cb4-4aea-bdbf-331baf36a814 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.040660] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1675.040660] env[62405]: value = "task-1947308" [ 1675.040660] env[62405]: _type = "Task" [ 1675.040660] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.053236] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947308, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.098615] env[62405]: DEBUG oslo_concurrency.lockutils [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] Releasing lock "refresh_cache-f0ca0d3d-cb2b-467b-a466-c270794055d7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.099135] env[62405]: DEBUG nova.compute.manager [req-d6a6820d-672e-47b8-aba7-686f1b883b4e req-593c84e8-48a1-4275-a3bf-bf876e93912c service nova] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Received event network-vif-deleted-5e6a4310-9a98-402b-bb12-b6ed546139b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1675.120631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4d0961d-d730-42c4-a208-e69f73f7320b tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.134s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.121717] env[62405]: DEBUG nova.network.neutron [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updating instance_info_cache with network_info: [{"id": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "address": "fa:16:3e:31:fc:df", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6afd5e2e-fe", "ovs_interfaceid": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.248121] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525dbbee-f9dd-6048-18d1-80c1ddbd0983, 'name': SearchDatastore_Task, 'duration_secs': 0.011768} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.252484] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e35d20-cd45-4f89-aab1-1e502bda5df3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.257832] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1675.257832] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb3a8f-4975-979d-883b-285e69a3f392" [ 1675.257832] env[62405]: _type = "Task" [ 1675.257832] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.265725] env[62405]: INFO nova.compute.manager [-] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Took 2.04 seconds to deallocate network for instance. [ 1675.270749] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb3a8f-4975-979d-883b-285e69a3f392, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.278042] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1675.312846] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1675.312931] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.313086] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1675.313282] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.313431] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1675.313578] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1675.313784] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1675.313945] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1675.314136] env[62405]: DEBUG nova.virt.hardware [None 
req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1675.314302] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1675.314473] env[62405]: DEBUG nova.virt.hardware [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1675.315533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d81d88-a5c2-4082-864c-dd544f40506e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.327665] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7405e0-be00-495a-ac74-4a34efb9a412 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.377990] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3379b4d3-25d6-4507-8d9f-7259dd2f8658 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.386970] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248907ff-b055-4ba6-bc81-15eb240fd3ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.429593] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6b0512-387c-4486-8335-42e0f766ca16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.439693] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db8987c-6b6d-4df5-9c9e-970497c2454c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.458040] env[62405]: DEBUG nova.compute.provider_tree [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.555088] env[62405]: DEBUG oslo_vmware.api [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947308, 'name': PowerOnVM_Task, 'duration_secs': 0.488876} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.556480] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1675.557195] env[62405]: DEBUG nova.compute.manager [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1675.558068] env[62405]: DEBUG nova.compute.manager [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Received event network-vif-plugged-6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1675.558322] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Acquiring lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.558533] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.558721] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.558903] env[62405]: DEBUG nova.compute.manager [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] No waiting events found dispatching network-vif-plugged-6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1675.559089] env[62405]: WARNING nova.compute.manager [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Received unexpected event network-vif-plugged-6afd5e2e-fe5f-4f25-a879-a25672a67740 for instance with vm_state building and task_state spawning. 
[ 1675.559252] env[62405]: DEBUG nova.compute.manager [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Received event network-changed-6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1675.559443] env[62405]: DEBUG nova.compute.manager [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Refreshing instance network info cache due to event network-changed-6afd5e2e-fe5f-4f25-a879-a25672a67740. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1675.559628] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Acquiring lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.560378] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f0626b-0642-4fe1-8d2e-21d4f12fc157 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.622315] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59957a81-5297-43d3-a673-024a53a19116" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.622766] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.622831] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59957a81-5297-43d3-a673-024a53a19116-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.622980] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.623208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.625491] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.625797] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Instance network_info: |[{"id": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "address": "fa:16:3e:31:fc:df", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6afd5e2e-fe", "ovs_interfaceid": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1675.626303] env[62405]: INFO nova.compute.manager [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Terminating instance [ 1675.627653] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Acquired lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.627843] env[62405]: DEBUG nova.network.neutron [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Refreshing network info cache for port 6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.629381] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:fc:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '399f3826-705c-45f7-9fe0-3a08a945151a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'6afd5e2e-fe5f-4f25-a879-a25672a67740', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1675.638099] env[62405]: DEBUG oslo.service.loopingcall [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.640640] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1675.641046] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f27b69d-f1a8-45cf-9a3b-dab791cdfa7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.663334] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1675.663334] env[62405]: value = "task-1947309" [ 1675.663334] env[62405]: _type = "Task" [ 1675.663334] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.671931] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947309, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.770257] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb3a8f-4975-979d-883b-285e69a3f392, 'name': SearchDatastore_Task, 'duration_secs': 0.018283} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.770653] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.770957] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f0ca0d3d-cb2b-467b-a466-c270794055d7/f0ca0d3d-cb2b-467b-a466-c270794055d7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.771290] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03f6b47f-3631-422a-94a2-f5c3efa03866 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.778177] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.780258] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1675.780258] env[62405]: value = "task-1947310" [ 1675.780258] env[62405]: _type = "Task" [ 1675.780258] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.792087] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947310, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.962200] env[62405]: DEBUG nova.scheduler.client.report [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1676.083413] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.146016] env[62405]: DEBUG nova.compute.manager [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1676.147022] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1676.149712] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d1bb05-0792-407b-8982-eb27cc7d393f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.162155] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1676.162449] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c16456e-c89f-4f8b-9cdf-58fbb625c2bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.176996] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947309, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.192492] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Successfully updated port: 262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1676.241271] env[62405]: DEBUG nova.compute.manager [req-5822ca68-0007-4216-b7f3-64155f72312e req-b7a684a2-1952-4390-bbda-0d8b1489c44a service nova] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Received event network-vif-deleted-666e898c-754c-4b07-b0d9-dac2a9a5bc6d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1676.292241] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947310, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.371393] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.371684] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.371912] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] 59957a81-5297-43d3-a673-024a53a19116 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.372620] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62b13b55-5034-4dcc-a65c-db509be4ddb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.382702] env[62405]: DEBUG oslo_vmware.api [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1676.382702] env[62405]: value = "task-1947312" [ 1676.382702] env[62405]: _type = "Task" [ 1676.382702] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.391820] env[62405]: DEBUG oslo_vmware.api [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947312, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.417488] env[62405]: DEBUG nova.network.neutron [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updated VIF entry in instance network info cache for port 6afd5e2e-fe5f-4f25-a879-a25672a67740. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.417869] env[62405]: DEBUG nova.network.neutron [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updating instance_info_cache with network_info: [{"id": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "address": "fa:16:3e:31:fc:df", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6afd5e2e-fe", "ovs_interfaceid": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.469710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.472312] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.090s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.472609] env[62405]: DEBUG nova.objects.instance [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lazy-loading 'resources' on Instance uuid 792cd2c8-a67d-4b16-93ab-722fcc8b622d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1676.489993] env[62405]: INFO nova.scheduler.client.report [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Deleted allocations for instance 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6 [ 1676.641129] env[62405]: 
DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "79548471-56f8-410c-a664-d2242541cd2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.641129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.676900] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947309, 'name': CreateVM_Task, 'duration_secs': 0.662182} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.680946] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1676.680946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.680946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.680946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1676.680946] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e76b1526-6f5b-4c18-82c1-7b1de64e805c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.684750] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1676.684750] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a8179-e7fe-0603-07b8-ec2aaa2fb48c" [ 1676.684750] env[62405]: _type = "Task" [ 1676.684750] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.695655] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.696029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.696350] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1676.697745] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a8179-e7fe-0603-07b8-ec2aaa2fb48c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.791451] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947310, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577431} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.791714] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f0ca0d3d-cb2b-467b-a466-c270794055d7/f0ca0d3d-cb2b-467b-a466-c270794055d7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.791928] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.792337] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6e36437-7a35-4c63-afd2-b4c338b11422 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.803458] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1676.803458] env[62405]: value = "task-1947313" [ 1676.803458] env[62405]: _type = "Task" [ 1676.803458] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.812873] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947313, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.894547] env[62405]: DEBUG oslo_vmware.api [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947312, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199462} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.894837] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.895086] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.895304] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.895474] env[62405]: INFO nova.compute.manager [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59957a81-5297-43d3-a673-024a53a19116] Took 0.75 seconds to destroy the instance on the hypervisor. [ 1676.895726] env[62405]: DEBUG oslo.service.loopingcall [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.895942] env[62405]: DEBUG nova.compute.manager [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1676.896581] env[62405]: DEBUG nova.network.neutron [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1676.920619] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee141638-fb64-4395-ace5-206cac71dcf3 req-47db3096-420c-4930-b8b8-5104bc1d6eb9 service nova] Releasing lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.007069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-43bee4dd-8b8b-459f-bc1c-00470ecac135 tempest-ImagesNegativeTestJSON-110130304 tempest-ImagesNegativeTestJSON-110130304-project-member] Lock "8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.317s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.146280] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1677.203242] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a8179-e7fe-0603-07b8-ec2aaa2fb48c, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.203764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.204108] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1677.204414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.204627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.204884] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1677.205500] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00acbe83-1883-497a-a038-b7f872a70d60 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.224581] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1677.224865] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 
tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1677.225963] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4db85554-cf88-4bd8-98c6-a7d68877f5c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.232235] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1677.232235] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529deb8d-9173-0845-698f-90b6f047c454" [ 1677.232235] env[62405]: _type = "Task" [ 1677.232235] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.244795] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529deb8d-9173-0845-698f-90b6f047c454, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.265120] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1677.317537] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134882} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.318244] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.318687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ece99f-fc8c-42f3-bfff-ebe4bd2501f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.349839] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] f0ca0d3d-cb2b-467b-a466-c270794055d7/f0ca0d3d-cb2b-467b-a466-c270794055d7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.357848] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6004b573-7dc4-47f8-9159-92e2ead01979 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.384994] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1677.384994] env[62405]: value = "task-1947314" [ 1677.384994] env[62405]: _type = "Task" [ 1677.384994] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.398262] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947314, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.570213] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8df6f2-6e09-46d3-8575-25f76fc2c5af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.580491] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692b8e1d-cf3f-4928-9db9-52044a1daf39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.617274] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28c3263-6e72-43e7-8507-56a39d0992f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.627110] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cf39e9-b262-44ec-bec5-28d9050d94c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.646039] env[62405]: DEBUG nova.compute.provider_tree [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.667893] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.744238] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529deb8d-9173-0845-698f-90b6f047c454, 'name': SearchDatastore_Task, 'duration_secs': 0.028466} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.745140] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3f737e-d5a5-44d1-9347-ea0c04bfb3e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.751589] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1677.751589] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a5bb1a-f3dc-37e9-4d4e-ec22118daa23" [ 1677.751589] env[62405]: _type = "Task" [ 1677.751589] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.760615] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a5bb1a-f3dc-37e9-4d4e-ec22118daa23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.791682] env[62405]: DEBUG nova.network.neutron [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updating instance_info_cache with network_info: [{"id": "262764f7-a6da-4d37-a804-a4b6719d4a50", "address": "fa:16:3e:61:bc:57", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262764f7-a6", "ovs_interfaceid": "262764f7-a6da-4d37-a804-a4b6719d4a50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.877051] env[62405]: DEBUG nova.network.neutron [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.897695] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947314, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.904633] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received event network-vif-plugged-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1677.904860] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.905105] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.905345] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.905464] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] No waiting events found dispatching network-vif-plugged-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1677.905884] env[62405]: WARNING nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received unexpected event network-vif-plugged-262764f7-a6da-4d37-a804-a4b6719d4a50 for instance with vm_state building and task_state spawning. [ 1677.905884] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received event network-changed-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1677.905884] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Refreshing instance network info cache due to event network-changed-262764f7-a6da-4d37-a804-a4b6719d4a50. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1677.906203] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Acquiring lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.001596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a6a0e918-425d-44de-a22b-8779e9108533" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.001945] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.002236] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a6a0e918-425d-44de-a22b-8779e9108533-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.002470] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.006656] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.004s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.008604] env[62405]: INFO nova.compute.manager [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Terminating instance [ 1678.150021] env[62405]: DEBUG nova.scheduler.client.report [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1678.264999] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a5bb1a-f3dc-37e9-4d4e-ec22118daa23, 'name': SearchDatastore_Task, 'duration_secs': 0.067628} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.265556] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.265892] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/34ec55c6-1a7a-4ffa-8efd-9eedd7495d44.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1678.266180] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2017ebe-c08a-4735-b35a-c583679112c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.276503] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1678.276503] env[62405]: value = "task-1947315" [ 1678.276503] env[62405]: _type = "Task" [ 1678.276503] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.286915] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947315, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.294623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.295023] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance network_info: |[{"id": "262764f7-a6da-4d37-a804-a4b6719d4a50", "address": "fa:16:3e:61:bc:57", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262764f7-a6", "ovs_interfaceid": "262764f7-a6da-4d37-a804-a4b6719d4a50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1678.295362] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Acquired lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.295542] env[62405]: DEBUG nova.network.neutron [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Refreshing network info cache for port 262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1678.296904] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:bc:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '262764f7-a6da-4d37-a804-a4b6719d4a50', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1678.305069] env[62405]: DEBUG oslo.service.loopingcall [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 
tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1678.305993] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1678.306321] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-181a32f0-d426-40bc-addb-f89e419d2157 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.332321] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1678.332321] env[62405]: value = "task-1947316" [ 1678.332321] env[62405]: _type = "Task" [ 1678.332321] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.343837] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947316, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.383459] env[62405]: INFO nova.compute.manager [-] [instance: 59957a81-5297-43d3-a673-024a53a19116] Took 1.49 seconds to deallocate network for instance. [ 1678.401589] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947314, 'name': ReconfigVM_Task, 'duration_secs': 1.008247} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.401589] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Reconfigured VM instance instance-00000032 to attach disk [datastore1] f0ca0d3d-cb2b-467b-a466-c270794055d7/f0ca0d3d-cb2b-467b-a466-c270794055d7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.401589] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5f63a82-015a-4778-8d4b-307ab5289ca4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.411413] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1678.411413] env[62405]: value = "task-1947317" [ 1678.411413] env[62405]: _type = "Task" [ 1678.411413] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.422264] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947317, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.513961] env[62405]: DEBUG nova.compute.manager [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1678.514306] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1678.515330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326c99d9-f927-4a34-80fd-fd8e26f8d060 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.527022] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1678.527159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c65d87b5-438b-42a7-a501-e5e445c40b36 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.535693] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1678.535693] env[62405]: value = "task-1947318" [ 1678.535693] env[62405]: _type = "Task" [ 1678.535693] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.546127] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947318, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.659524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.666113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.292s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.666113] env[62405]: DEBUG nova.objects.instance [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lazy-loading 'resources' on Instance uuid d5686d7c-a73f-4e02-8726-eab8221a0eae {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1678.703155] env[62405]: INFO nova.scheduler.client.report [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Deleted allocations for instance 792cd2c8-a67d-4b16-93ab-722fcc8b622d [ 1678.790199] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947315, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.845689] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947316, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.895875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.928398] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947317, 'name': Rename_Task, 'duration_secs': 0.190962} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.928671] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1678.928929] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32d04935-d935-4131-8187-628fc6c3d6eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.940135] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1678.940135] env[62405]: value = "task-1947319" [ 1678.940135] env[62405]: _type = "Task" [ 1678.940135] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.953364] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947319, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.051667] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947318, 'name': PowerOffVM_Task, 'duration_secs': 0.338413} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.052146] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1679.052442] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1679.052809] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15530d0b-4b7e-4ecb-9490-e43ed51769fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.073210] env[62405]: DEBUG nova.compute.manager [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1679.073490] env[62405]: DEBUG nova.compute.manager [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing instance network info cache due to event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1679.073784] env[62405]: DEBUG oslo_concurrency.lockutils [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.073948] env[62405]: DEBUG oslo_concurrency.lockutils [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.074149] env[62405]: DEBUG nova.network.neutron [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.216591] env[62405]: DEBUG oslo_concurrency.lockutils [None req-341dd44c-33cc-4493-9805-6cc4286b9cf6 tempest-VolumesAssistedSnapshotsTest-826881931 tempest-VolumesAssistedSnapshotsTest-826881931-project-member] Lock "792cd2c8-a67d-4b16-93ab-722fcc8b622d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.810s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.232686] env[62405]: DEBUG nova.network.neutron [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updated VIF entry in 
instance network info cache for port 262764f7-a6da-4d37-a804-a4b6719d4a50. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1679.233101] env[62405]: DEBUG nova.network.neutron [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updating instance_info_cache with network_info: [{"id": "262764f7-a6da-4d37-a804-a4b6719d4a50", "address": "fa:16:3e:61:bc:57", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262764f7-a6", "ovs_interfaceid": "262764f7-a6da-4d37-a804-a4b6719d4a50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.294098] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649358} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.295543] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/34ec55c6-1a7a-4ffa-8efd-9eedd7495d44.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1679.295775] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1679.298961] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd79d70f-cde8-41d5-adc7-9a819b6e0166 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.302669] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.303213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.313524] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1679.313524] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1679.313708] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] a6a0e918-425d-44de-a22b-8779e9108533 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1679.313976] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6b5d9425-3c4c-4723-a8c6-fbc7e619b862 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.318047] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1679.318047] env[62405]: value = "task-1947321" [ 1679.318047] env[62405]: _type = "Task" [ 1679.318047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.327221] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1679.327221] env[62405]: value = "task-1947322" [ 1679.327221] env[62405]: _type = "Task" [ 1679.327221] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.334431] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947321, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.341690] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.351624] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947316, 'name': CreateVM_Task, 'duration_secs': 0.869149} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.351624] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1679.351624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.351624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.351624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1679.351624] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95b66055-b3bb-49c4-bafa-1b569732bc28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.356879] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1679.356879] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238f1c2-b3ba-3cde-5f1a-c15043307368" [ 1679.356879] env[62405]: _type = "Task" [ 1679.356879] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.370220] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238f1c2-b3ba-3cde-5f1a-c15043307368, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.453795] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947319, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.736178] env[62405]: DEBUG oslo_concurrency.lockutils [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] Releasing lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.736464] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Received event network-vif-deleted-740acbcf-c471-4523-a1ba-a92cc67c2990 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1679.736529] env[62405]: INFO nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Neutron deleted interface 740acbcf-c471-4523-a1ba-a92cc67c2990; detaching it from the instance and deleting it from the info cache [ 1679.736702] env[62405]: DEBUG nova.network.neutron [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.767963] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a7aa1d-4fd5-4a83-8a54-989e58a76ae2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.778708] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119f9d61-c08b-4720-af4b-fae3685841a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.812289] env[62405]: DEBUG nova.network.neutron [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updated VIF entry in instance network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1679.812652] env[62405]: DEBUG nova.network.neutron [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.814841] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8784077b-358f-4aa8-a626-4b5d50d9a505 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.832967] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09394} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.838011] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1679.839093] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710dfcef-3871-4907-96b7-5db8093cfca1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.842718] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9674150-266b-4b46-bc52-9d53e9c9376b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.854612] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947322, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.878164] env[62405]: DEBUG nova.compute.provider_tree [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.891370] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/34ec55c6-1a7a-4ffa-8efd-9eedd7495d44.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1679.895766] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff708dc1-c8fb-4d7c-93fe-c35ae47cf8e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.919584] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238f1c2-b3ba-3cde-5f1a-c15043307368, 'name': SearchDatastore_Task, 'duration_secs': 0.018112} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.921164] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.921424] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1679.921686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.921835] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.922036] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1679.922383] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1679.922383] env[62405]: value = "task-1947323" [ 1679.922383] env[62405]: _type = "Task" [ 1679.922383] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.922584] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-335c478d-0c39-401c-ad57-cd3d6a0e57f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.938867] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947323, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.941331] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1679.941660] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1679.942976] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0aadfb8-eb66-44e1-abe4-622e1d5bfdeb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.958826] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1679.958826] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f9f854-f041-1ddf-c4ab-a36888b404ab" [ 1679.958826] env[62405]: _type = "Task" [ 1679.958826] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.964566] env[62405]: DEBUG oslo_vmware.api [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947319, 'name': PowerOnVM_Task, 'duration_secs': 0.934772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.969258] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1679.969497] env[62405]: INFO nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Took 10.12 seconds to spawn the instance on the hypervisor. [ 1679.969677] env[62405]: DEBUG nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1679.970565] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9243c4ba-9bb3-49c3-8d64-85228da75046 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.984163] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f9f854-f041-1ddf-c4ab-a36888b404ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.242087] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8472e7f-e73c-4441-8ade-0f49f369e0aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.252619] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75b5937-99aa-4c7f-9bc8-5fa202531bab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.291096] env[62405]: DEBUG nova.compute.manager [req-15cfad76-070b-44f2-8c05-7d16f515eed2 req-09596f5b-0dce-4f22-ade1-3e0945864b4b service nova] [instance: 59957a81-5297-43d3-a673-024a53a19116] Detach interface failed, port_id=740acbcf-c471-4523-a1ba-a92cc67c2990, reason: Instance 59957a81-5297-43d3-a673-024a53a19116 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1680.322588] env[62405]: DEBUG oslo_concurrency.lockutils [req-631e8235-c4c2-43ef-a2be-ed2659e05ec8 req-c44582bf-2630-4c52-9698-44ae2c949078 service nova] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.342735] env[62405]: DEBUG oslo_vmware.api [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.631528} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.345209] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1680.345209] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1680.345209] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1680.345209] env[62405]: INFO nova.compute.manager [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1680.345209] env[62405]: DEBUG oslo.service.loopingcall [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1680.345209] env[62405]: DEBUG nova.compute.manager [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1680.345209] env[62405]: DEBUG nova.network.neutron [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1680.395125] env[62405]: DEBUG nova.scheduler.client.report [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1680.439848] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947323, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.478021] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f9f854-f041-1ddf-c4ab-a36888b404ab, 'name': SearchDatastore_Task, 'duration_secs': 0.028581} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.478021] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf8e027-3554-4e20-9e70-1e5ef127ee7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.491382] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1680.491382] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e645b3-6eae-8fbb-dcf1-ea6fe5afa615" [ 1680.491382] env[62405]: _type = "Task" [ 1680.491382] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.498910] env[62405]: INFO nova.compute.manager [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Took 42.08 seconds to build instance. [ 1680.513759] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e645b3-6eae-8fbb-dcf1-ea6fe5afa615, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.899851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.903405] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.590s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.905114] env[62405]: INFO nova.compute.claims [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.943468] env[62405]: INFO nova.scheduler.client.report [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Deleted allocations for instance d5686d7c-a73f-4e02-8726-eab8221a0eae [ 1680.948713] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947323, 'name': ReconfigVM_Task, 'duration_secs': 0.978198} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.952449] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/34ec55c6-1a7a-4ffa-8efd-9eedd7495d44.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1680.953182] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62405) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1680.954503] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-b63ca233-dc48-4875-b712-e6f0193f5b24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.967209] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1680.967209] env[62405]: value = "task-1947324" [ 1680.967209] env[62405]: _type = "Task" [ 1680.967209] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.977083] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947324, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.001491] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3040047d-7b21-49ad-9f81-8d400c74e52b tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.917s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.010212] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e645b3-6eae-8fbb-dcf1-ea6fe5afa615, 'name': SearchDatastore_Task, 'duration_secs': 0.028482} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.013437] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1681.013694] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6c6a3974-c87e-47ed-a025-d6221a8decd7/6c6a3974-c87e-47ed-a025-d6221a8decd7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1681.014393] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d44fed31-cff3-4d18-8e50-4a13ec90acde {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.023669] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1681.023669] env[62405]: value = "task-1947325" [ 1681.023669] env[62405]: _type = "Task" [ 1681.023669] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.032832] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.072620] env[62405]: DEBUG nova.network.neutron [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1681.456065] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4458c6a2-0bc9-4d4a-9354-48b1433d6fd3 tempest-ServerShowV254Test-1913842991 tempest-ServerShowV254Test-1913842991-project-member] Lock "d5686d7c-a73f-4e02-8726-eab8221a0eae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.445s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.477490] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947324, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.079481} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.477774] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62405) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1681.478634] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d952d8-d2c6-4a0a-9637-f25bb5c552a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.508913] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/ephemeral_0.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1681.509441] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1681.515980] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d67d9af-a5d5-4c53-bbbf-487bfeae4e3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.548611] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947325, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.550085] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1681.550085] env[62405]: value = "task-1947326" [ 1681.550085] env[62405]: _type = "Task" [ 1681.550085] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.559036] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947326, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.575487] env[62405]: INFO nova.compute.manager [-] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Took 1.23 seconds to deallocate network for instance. 
[ 1682.036186] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.051653] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541177} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.058510] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6c6a3974-c87e-47ed-a025-d6221a8decd7/6c6a3974-c87e-47ed-a025-d6221a8decd7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1682.058644] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1682.060361] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed581020-cc52-4431-8c71-d839b727e25d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.071058] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947326, 'name': ReconfigVM_Task, 'duration_secs': 0.437727} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.072110] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44/ephemeral_0.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.072110] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1682.072110] env[62405]: value = "task-1947327" [ 1682.072110] env[62405]: _type = "Task" [ 1682.072110] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.072110] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d84fd2d1-ee02-4288-97d5-0dedf9197387 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.082375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.087846] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.089984] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1682.089984] env[62405]: value = "task-1947328" [ 1682.089984] env[62405]: _type = "Task" [ 1682.089984] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.102803] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947328, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.160415] env[62405]: DEBUG nova.compute.manager [req-90716d5e-029e-4078-becf-5d37daec09bc req-6107d0b0-4faa-470a-9757-60d8503dd863 service nova] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Received event network-vif-deleted-d504fb4b-5637-4d63-aaa3-5273e3b34481 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1682.492015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba731f9b-b9a5-4d36-88d8-90cfed8b14c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.502505] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2203570-ffa9-4740-a7b2-21a7624e23b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.541060] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fa231c-08e3-4332-a04f-e92157abac1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.551203] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b56b3f3-38ce-4be9-b3d1-14477d50fce6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.571884] env[62405]: DEBUG nova.compute.provider_tree [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1682.586661] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149203} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.586661] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1682.586661] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c181160e-2a8b-41d1-b5c9-f6a0a1de8594 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.611660] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 6c6a3974-c87e-47ed-a025-d6221a8decd7/6c6a3974-c87e-47ed-a025-d6221a8decd7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1682.614433] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63c5096b-db98-4718-b0ca-ce8aed440faf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.630556] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947328, 'name': Rename_Task, 'duration_secs': 0.188347} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.631191] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1682.631432] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbda7ccb-5ff4-4da6-bb08-2202ab775929 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.638260] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1682.638260] env[62405]: value = "task-1947329" [ 1682.638260] env[62405]: _type = "Task" [ 1682.638260] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.639559] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1682.639559] env[62405]: value = "task-1947330" [ 1682.639559] env[62405]: _type = "Task" [ 1682.639559] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.650703] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.655025] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947329, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.768888] env[62405]: DEBUG nova.compute.manager [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1682.769760] env[62405]: DEBUG nova.compute.manager [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing instance network info cache due to event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1682.770056] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.770235] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.770426] env[62405]: DEBUG nova.network.neutron [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1683.081100] env[62405]: DEBUG nova.scheduler.client.report [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1683.156723] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 
tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947329, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.160313] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947330, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.243638] env[62405]: DEBUG nova.compute.manager [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1683.244757] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbd50b7-2553-4b47-a5ae-f5c55499139f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.512698] env[62405]: DEBUG nova.network.neutron [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updated VIF entry in instance network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1683.518079] env[62405]: DEBUG nova.network.neutron [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.587625] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.588175] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1683.591934] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.476s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.592351] env[62405]: DEBUG nova.objects.instance [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1683.660174] env[62405]: DEBUG oslo_vmware.api [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947330, 'name': PowerOnVM_Task, 'duration_secs': 0.765914} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.664866] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1683.665226] env[62405]: INFO nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Took 10.65 seconds to spawn the instance on the hypervisor. [ 1683.665482] env[62405]: DEBUG nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1683.666255] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947329, 'name': ReconfigVM_Task, 'duration_secs': 0.644334} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.667045] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9df35d-7a41-4a04-ba71-ac0c07171baf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.670229] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 6c6a3974-c87e-47ed-a025-d6221a8decd7/6c6a3974-c87e-47ed-a025-d6221a8decd7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1683.671205] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc0ee03b-89f3-406d-8967-a48495798d8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.683166] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1683.683166] env[62405]: value = "task-1947331" [ 1683.683166] env[62405]: _type = "Task" [ 1683.683166] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.694992] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947331, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.735350] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.735563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1683.760296] env[62405]: INFO nova.compute.manager [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] instance snapshotting [ 1683.760963] env[62405]: DEBUG nova.objects.instance [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.019729] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a95c56f-991e-40a7-a38b-c4f73059bd44 req-ceefd567-6387-4e37-a785-706b44263b9e service nova] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.096120] env[62405]: DEBUG nova.compute.utils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.104427] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1684.104529] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1684.200569] env[62405]: DEBUG nova.policy [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5534b907b3a9492ebed6ba5cd24a547c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f3b7603d8d94bfeba8f26b6e99baae7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1684.206091] env[62405]: INFO nova.compute.manager [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Took 44.80 seconds to build instance. [ 1684.214960] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947331, 'name': Rename_Task, 'duration_secs': 0.293549} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.215275] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1684.215522] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d8be800-d338-4141-81a5-a8c47e565373 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.224472] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1684.224472] env[62405]: value = "task-1947332" [ 1684.224472] env[62405]: _type = "Task" [ 1684.224472] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.232904] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947332, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.266601] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851de718-4d18-4e79-8208-9556aeb717ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.290658] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee3d8b0-be08-4a38-99b8-dfa02f431554 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.606262] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1684.611476] env[62405]: DEBUG oslo_concurrency.lockutils [None req-af6579f0-3bb2-4b49-8579-38d377c84d6d tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.612644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.855s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.612943] env[62405]: DEBUG nova.objects.instance [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lazy-loading 'resources' on Instance uuid b8ff115b-64f1-4584-afa2-478c5e6b726b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.676401] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Successfully created port: a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1684.697463] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.697735] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.697959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.698166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.698663] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.702722] env[62405]: INFO nova.compute.manager [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Terminating instance [ 1684.708609] env[62405]: DEBUG oslo_concurrency.lockutils [None req-54fd93a0-0311-4195-b12c-68418a49ec0b tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.561s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.736442] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947332, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.804528] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1684.804804] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-25d48f23-fd86-4d8c-a561-168c4cdb036f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.815733] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1684.815733] env[62405]: value = "task-1947333" [ 1684.815733] env[62405]: _type = "Task" [ 1684.815733] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.825266] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947333, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.212595] env[62405]: DEBUG nova.compute.manager [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1685.213071] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1685.213558] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1685.217424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12330a39-0f84-4d81-96de-7e32badcda4b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.233824] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1685.235030] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc14d264-e681-4e45-af5e-3519fccc4342 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.243682] env[62405]: DEBUG oslo_vmware.api [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947332, 'name': PowerOnVM_Task, 'duration_secs': 0.752434} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.245355] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1685.245595] env[62405]: INFO nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Took 9.97 seconds to spawn the instance on the hypervisor. [ 1685.245968] env[62405]: DEBUG nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1685.246111] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1685.246111] env[62405]: value = "task-1947334" [ 1685.246111] env[62405]: _type = "Task" [ 1685.246111] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.246824] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f67127-68cd-46ef-a5d9-d6d1670273a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.259614] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947334, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.333123] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947333, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.423123] env[62405]: DEBUG nova.compute.manager [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1685.423600] env[62405]: DEBUG nova.compute.manager [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing instance network info cache due to event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1685.423600] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.423758] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.423934] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.617367] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1685.648855] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1685.649082] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.649251] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1685.649434] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.649583] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1685.649732] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1685.649953] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1685.650361] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 
tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1685.650574] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1685.650745] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1685.650924] env[62405]: DEBUG nova.virt.hardware [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1685.652361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa0ea4d-8917-4bea-9748-1f54bae8cd90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.662426] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2043f39d-c322-42a1-8271-24e1e0519735 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.744611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.758371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8befc2-df4e-41b6-a115-9400950dee09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.764432] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947334, 'name': PowerOffVM_Task, 'duration_secs': 0.314431} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.765157] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1685.765362] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1685.765592] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56ea1dec-e435-4326-b0c9-0acd860ca451 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.770089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64294546-de5a-4a19-8e08-346ac65f7c09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.779638] env[62405]: INFO nova.compute.manager [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Took 46.10 seconds to build instance. [ 1685.819512] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c510f84-7da5-42e2-8a1d-35a100c04e2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.835589] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6473193d-70bb-4219-8d85-d913b50f4aa8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.841758] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947333, 'name': CreateSnapshot_Task, 'duration_secs': 0.591613} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.842994] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1685.843761] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e9bcdc-a09d-49a6-921e-abc01f715dec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.848420] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1685.848824] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1685.848824] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] f0ca0d3d-cb2b-467b-a466-c270794055d7 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1685.857346] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8b6680d-0a7d-48fe-b2d3-289fd2122b37 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.859940] env[62405]: DEBUG nova.compute.provider_tree [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.875203] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1685.875203] env[62405]: value = "task-1947336" [ 1685.875203] env[62405]: _type = "Task" [ 1685.875203] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.884702] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.244754] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updated VIF entry in instance network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.245165] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.317945] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2198cfd4-309b-4501-aee3-9983d4818315 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.256s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.370641] env[62405]: DEBUG nova.scheduler.client.report [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1686.380906] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating linked-clone VM from snapshot {{(pid=62405) 
_create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1686.382412] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7a7ed8fd-362e-4800-9b93-1ab2487c9329 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.397688] env[62405]: DEBUG oslo_vmware.api [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216915} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.399263] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1686.399693] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1686.399969] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1686.400085] env[62405]: INFO nova.compute.manager [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1686.400511] env[62405]: DEBUG oslo.service.loopingcall [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.401572] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1686.401572] env[62405]: value = "task-1947337" [ 1686.401572] env[62405]: _type = "Task" [ 1686.401572] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.401572] env[62405]: DEBUG nova.compute.manager [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1686.402771] env[62405]: DEBUG nova.network.neutron [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1686.418713] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947337, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.750607] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.750607] env[62405]: DEBUG nova.compute.manager [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1686.750607] env[62405]: DEBUG nova.compute.manager [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing instance network info cache due to event network-changed-f6fffc80-6395-4f72-8a63-b037918502c8. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1686.750981] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Acquiring lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.750981] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Acquired lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.751144] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Refreshing network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1686.883383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.271s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.885964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.815s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.887185] env[62405]: DEBUG nova.objects.instance [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lazy-loading 'resources' on Instance uuid 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1686.920742] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947337, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.923192] env[62405]: INFO nova.scheduler.client.report [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Deleted allocations for instance b8ff115b-64f1-4584-afa2-478c5e6b726b [ 1687.215617] env[62405]: DEBUG nova.compute.manager [req-d855c51c-aae1-44fa-92a7-c3862727f824 req-68271304-1072-44fd-90f4-5afa4a6e21be service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Received event network-vif-deleted-44befb6d-082c-47e0-9834-f3c7dc3d3210 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1687.215827] env[62405]: INFO nova.compute.manager [req-d855c51c-aae1-44fa-92a7-c3862727f824 req-68271304-1072-44fd-90f4-5afa4a6e21be service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Neutron deleted interface 44befb6d-082c-47e0-9834-f3c7dc3d3210; detaching it from the instance and deleting it from the info cache [ 1687.216095] env[62405]: DEBUG nova.network.neutron [req-d855c51c-aae1-44fa-92a7-c3862727f824 req-68271304-1072-44fd-90f4-5afa4a6e21be service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.250948] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.251280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.251436] env[62405]: INFO nova.compute.manager [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Shelving [ 1687.417175] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947337, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.435677] env[62405]: DEBUG oslo_concurrency.lockutils [None req-40310896-b13c-4525-845a-9678f76700fa tempest-ListImageFiltersTestJSON-38693891 tempest-ListImageFiltersTestJSON-38693891-project-member] Lock "b8ff115b-64f1-4584-afa2-478c5e6b726b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.289s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.449335] env[62405]: DEBUG nova.network.neutron [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.455111] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Successfully updated port: a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.693721] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updated VIF entry in instance network info cache for port f6fffc80-6395-4f72-8a63-b037918502c8. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1687.693721] env[62405]: DEBUG nova.network.neutron [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [{"id": "f6fffc80-6395-4f72-8a63-b037918502c8", "address": "fa:16:3e:e9:4c:f5", "network": {"id": "bdf0ffbc-8220-49ae-80a5-06dfea99bea9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1271406419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "024f8c817a3142b983afd4018e025452", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fffc80-63", "ovs_interfaceid": "f6fffc80-6395-4f72-8a63-b037918502c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.725094] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-378b1a75-de8d-4dae-8c13-2f774ae3a2f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.739306] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc0a843-519d-4bbf-a7df-443a4f5ef0ef {{(pid=62405) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.797861] env[62405]: DEBUG nova.compute.manager [req-d855c51c-aae1-44fa-92a7-c3862727f824 req-68271304-1072-44fd-90f4-5afa4a6e21be service nova] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Detach interface failed, port_id=44befb6d-082c-47e0-9834-f3c7dc3d3210, reason: Instance f0ca0d3d-cb2b-467b-a466-c270794055d7 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1687.885721] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "6213702e-8e39-4342-b62f-2c9495017bf9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.885896] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.886142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.886333] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.886510] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.889991] env[62405]: INFO nova.compute.manager [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Terminating instance [ 1687.918655] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947337, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.955386] env[62405]: INFO nova.compute.manager [-] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Took 1.55 seconds to deallocate network for instance. [ 1687.960246] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.961333] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquired lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.961333] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1688.017167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438da1e9-5b3a-49e5-a49f-414cdf7d5569 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.033713] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e4d75a-86ac-4d9c-9872-9d5d83d147e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.071604] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b6aa5d-f8cf-463a-960b-ead71d83477b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.082094] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249219e6-5628-4cf6-99f3-25a9f14b3298 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.097331] env[62405]: DEBUG nova.compute.provider_tree [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.198693] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fd8d8b-a167-47cf-85b1-23196b357859 req-dc070039-8fb3-4958-8c73-a9df74cd6835 service nova] Releasing lock "refresh_cache-6213702e-8e39-4342-b62f-2c9495017bf9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.271227] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Powering off the VM {{(pid=62405) 
power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1688.271227] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f261963-fcaf-40fe-a263-815f6a371e78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.281542] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1688.281542] env[62405]: value = "task-1947338" [ 1688.281542] env[62405]: _type = "Task" [ 1688.281542] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.293071] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.395434] env[62405]: DEBUG nova.compute.manager [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1688.395737] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1688.396662] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1b005c-6dbb-40c5-ba55-44343a93a144 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.405151] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1688.405429] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c733dc3-380c-4705-a111-a6af438d79fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.416188] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947337, 'name': CloneVM_Task, 'duration_secs': 1.564819} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.417592] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created linked-clone VM from snapshot [ 1688.417922] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1688.417922] env[62405]: value = "task-1947339" [ 1688.417922] env[62405]: _type = "Task" [ 1688.417922] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.418644] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2668dcc8-9f8d-4b72-bedb-b372e156ca89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.432667] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.439452] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploading image 6ef894cb-7aec-49b0-9d6b-4b554296fb09 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1688.463023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.476285] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1688.476285] env[62405]: value = "vm-401447" [ 1688.476285] env[62405]: _type = "VirtualMachine" [ 1688.476285] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1688.477334] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0ba2a48c-fbbf-4212-afd9-503c87873dde {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.488360] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease: (returnval){ [ 1688.488360] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc339-4a80-4496-a65d-7db69bb568b7" [ 1688.488360] env[62405]: _type = "HttpNfcLease" [ 1688.488360] env[62405]: } obtained for exporting VM: (result){ [ 1688.488360] env[62405]: value = "vm-401447" [ 1688.488360] env[62405]: _type = "VirtualMachine" [ 1688.488360] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1688.489413] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the lease: (returnval){ [ 1688.489413] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc339-4a80-4496-a65d-7db69bb568b7" [ 1688.489413] env[62405]: _type = "HttpNfcLease" [ 1688.489413] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1688.500800] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1688.500800] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc339-4a80-4496-a65d-7db69bb568b7" [ 1688.500800] env[62405]: _type = "HttpNfcLease" [ 1688.500800] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1688.534787] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.600739] env[62405]: DEBUG nova.scheduler.client.report [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1688.714828] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "377365a4-7538-4bab-a181-1940e6fb4066" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.714828] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.715222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "377365a4-7538-4bab-a181-1940e6fb4066-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.715900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.715900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.722497] env[62405]: INFO nova.compute.manager [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Terminating instance [ 1688.790961] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 
tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947338, 'name': PowerOffVM_Task, 'duration_secs': 0.341555} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.791322] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.792116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5fc3dd-3f84-46ea-a299-a7ca72c82e4b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.816752] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd81ae9-f796-4088-b53b-596f9aa1b57a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.938581] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947339, 'name': PowerOffVM_Task, 'duration_secs': 0.30653} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.939058] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.939391] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1688.941357] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac32534d-76ae-46ce-af88-90ea75c6a788 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.000135] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1689.000135] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc339-4a80-4496-a65d-7db69bb568b7" [ 1689.000135] env[62405]: _type = "HttpNfcLease" [ 1689.000135] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1689.000535] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1689.000535] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526bc339-4a80-4496-a65d-7db69bb568b7" [ 1689.000535] env[62405]: _type = "HttpNfcLease" [ 1689.000535] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1689.001816] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a91d49e-e37b-42d0-bd9d-c07a37b8d0eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.011519] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1689.011717] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1689.082612] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1689.082818] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1689.083025] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Deleting the datastore file [datastore1] 6213702e-8e39-4342-b62f-2c9495017bf9 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1689.083651] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c75b7cd-1184-4db7-b2bb-7d72146d96ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.094636] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for the task: (returnval){ [ 1689.094636] env[62405]: value = "task-1947342" [ 1689.094636] env[62405]: _type = "Task" [ 1689.094636] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.108037] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.110372] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.110887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.850s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.111158] env[62405]: DEBUG nova.objects.instance [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lazy-loading 'resources' on Instance uuid c392d6f3-b638-4857-826d-760c38b7d291 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.139820] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-99abe600-b91e-435b-921e-9d4f87052eaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.155023] env[62405]: INFO nova.scheduler.client.report [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleted allocations for instance 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac [ 1689.155840] env[62405]: DEBUG nova.network.neutron [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Updating instance_info_cache with network_info: [{"id": "a6d143aa-0ba8-425d-888b-e637ee77db16", "address": "fa:16:3e:83:33:27", "network": {"id": "5d0b89dd-9822-4198-8ace-5181f3390bc8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1529652822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f3b7603d8d94bfeba8f26b6e99baae7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d143aa-0b", "ovs_interfaceid": 
"a6d143aa-0ba8-425d-888b-e637ee77db16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.230379] env[62405]: DEBUG nova.compute.manager [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1689.230795] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1689.232567] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3276c969-13e1-4a27-a0c9-a44998831dd8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.245266] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1689.245607] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ca71e71-de46-4274-9fe1-22a9dc6057f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.256583] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1689.256583] env[62405]: value = "task-1947343" [ 1689.256583] env[62405]: _type = "Task" [ 1689.256583] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.275893] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.330056] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1689.330507] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b2eaa761-5560-4d8a-bea6-7d97c45ea529 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.348605] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1689.348605] env[62405]: value = "task-1947344" [ 1689.348605] env[62405]: _type = "Task" [ 1689.348605] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.357734] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947344, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.385593] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Received event network-vif-plugged-a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1689.385926] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Acquiring lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.386249] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.386769] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.386769] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] No waiting events found dispatching network-vif-plugged-a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1689.386953] env[62405]: 
WARNING nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Received unexpected event network-vif-plugged-a6d143aa-0ba8-425d-888b-e637ee77db16 for instance with vm_state building and task_state spawning. [ 1689.387223] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Received event network-changed-a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1689.387427] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Refreshing instance network info cache due to event network-changed-a6d143aa-0ba8-425d-888b-e637ee77db16. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1689.387916] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Acquiring lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.609123] env[62405]: DEBUG oslo_vmware.api [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Task: {'id': task-1947342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.343239} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.609819] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1689.609819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1689.610600] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1689.610600] env[62405]: INFO nova.compute.manager [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Took 1.21 seconds to destroy the instance on the hypervisor. 
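The lock bookkeeping that dominates this part of the trace (Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... "released" ... held N.NNNs, all reported from oslo_concurrency/lockutils.py) is emitted by oslo.concurrency's named-lock wrappers. A minimal stand-alone sketch of that pattern, assuming only the oslo.concurrency package; the lock names and the work done under the locks are illustrative placeholders, not Nova's actual code:

from oslo_concurrency import lockutils

# Decorator form: the whole function body runs with the named lock held.
# Concurrent callers block while acquiring, which is what the
# "waited N.NNNs" figures in the records above measure.
@lockutils.synchronized('refresh_cache-example-instance-uuid')
def refresh_cache():
    pass  # placeholder for the cache-refresh work

# Context-manager form, used for ad-hoc critical sections such as the
# per-instance "<uuid>-events" locks seen above.
def clear_events():
    with lockutils.lock('example-instance-uuid-events'):
        pass  # placeholder for the event bookkeeping

refresh_cache()
clear_events()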
[ 1689.610600] env[62405]: DEBUG oslo.service.loopingcall [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.610831] env[62405]: DEBUG nova.compute.manager [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1689.610883] env[62405]: DEBUG nova.network.neutron [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1689.662360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Releasing lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.663538] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Instance network_info: |[{"id": "a6d143aa-0ba8-425d-888b-e637ee77db16", "address": "fa:16:3e:83:33:27", "network": {"id": "5d0b89dd-9822-4198-8ace-5181f3390bc8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1529652822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f3b7603d8d94bfeba8f26b6e99baae7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d143aa-0b", "ovs_interfaceid": "a6d143aa-0ba8-425d-888b-e637ee77db16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1689.665852] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Acquired lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.668933] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Refreshing network info cache for port a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} 
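The recurring "Waiting for the task", "progress is N%" and "completed successfully" records (wait_for_task / _poll_task in oslo_vmware/api.py) come from oslo.vmware's polling of asynchronous vSphere operations such as PowerOffVM_Task, CreateSnapshot_Task and DeleteDatastoreFile_Task. A minimal sketch of that polling pattern, assuming only the oslo.vmware package; the vCenter endpoint, credentials and object lookup below are placeholders, not values from this log:

from oslo_vmware import api, vim_util

# Establish a vCenter API session (placeholder endpoint and credentials).
session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Fetch some VirtualMachine managed-object reference to act on
# (illustrative lookup; a real caller already knows which VM it wants).
retrieve_result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                     'VirtualMachine', 100)
vm_ref = retrieve_result.objects[0].obj

# Start an asynchronous vSphere task, then block until it finishes;
# wait_for_task() polls the task and logs its progress, producing records
# like the ones in this trace.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)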
[ 1689.674521] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:33:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6d143aa-0ba8-425d-888b-e637ee77db16', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1689.691746] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Creating folder: Project (7f3b7603d8d94bfeba8f26b6e99baae7). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.698358] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e7b1419d-628e-439d-a22f-d99f8a6f3824 tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.409s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.703567] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0f3573c-218a-4699-84f3-fe89022d27fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.730608] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Created folder: Project (7f3b7603d8d94bfeba8f26b6e99baae7) in parent group-v401284. [ 1689.734055] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Creating folder: Instances. Parent ref: group-v401448. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.734055] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1777d73-510a-49ed-aea0-7407aa03f7b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.754320] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Created folder: Instances in parent group-v401448. [ 1689.754991] env[62405]: DEBUG oslo.service.loopingcall [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.759337] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1689.763559] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94042826-a168-471c-aa6d-43239185bfc2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.798706] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947343, 'name': PowerOffVM_Task, 'duration_secs': 0.208487} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.801025] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1689.802380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1689.802650] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1689.802650] env[62405]: value = "task-1947347" [ 1689.802650] env[62405]: _type = "Task" [ 1689.802650] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.806171] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da507088-9c2f-468d-b080-7323258743d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.827808] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947347, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.858802] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947344, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.939971] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1689.939971] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1689.940209] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Deleting the datastore file [datastore1] 377365a4-7538-4bab-a181-1940e6fb4066 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1689.940626] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3453a923-c07d-4d68-bdd4-9b6cedc4555f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.950747] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for the task: (returnval){ [ 1689.950747] env[62405]: value = "task-1947349" [ 1689.950747] env[62405]: _type = "Task" [ 1689.950747] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.970304] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947349, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.289118] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.289457] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.290076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.290265] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.290448] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.292792] env[62405]: INFO nova.compute.manager [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Terminating instance [ 1690.323539] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947347, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.357031] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Updated VIF entry in instance network info cache for port a6d143aa-0ba8-425d-888b-e637ee77db16. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.358280] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Updating instance_info_cache with network_info: [{"id": "a6d143aa-0ba8-425d-888b-e637ee77db16", "address": "fa:16:3e:83:33:27", "network": {"id": "5d0b89dd-9822-4198-8ace-5181f3390bc8", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1529652822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f3b7603d8d94bfeba8f26b6e99baae7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d143aa-0b", "ovs_interfaceid": "a6d143aa-0ba8-425d-888b-e637ee77db16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.367598] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947344, 'name': CreateSnapshot_Task, 'duration_secs': 0.753821} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.368962] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1690.369454] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b7bb28-8f30-4d1b-97da-4863429769d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.389217] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a40ec9-ca94-4981-ae89-7ebe27b75b3c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.403262] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e2cd34-c5b7-4ce7-be55-4bdd35d58d5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.442292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f040c9ad-d595-48e8-afee-bb702bcbd918 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.451423] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37f2ab3-cd58-4d18-8159-6abd29b3982a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.471343] env[62405]: DEBUG nova.compute.provider_tree [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.475389] env[62405]: DEBUG oslo_vmware.api [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Task: {'id': task-1947349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183142} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.475872] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1690.477143] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1690.477143] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1690.477143] env[62405]: INFO nova.compute.manager [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1690.477143] env[62405]: DEBUG oslo.service.loopingcall [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1690.477143] env[62405]: DEBUG nova.compute.manager [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1690.477143] env[62405]: DEBUG nova.network.neutron [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1690.805731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "refresh_cache-0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.805917] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquired lock "refresh_cache-0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.806328] env[62405]: DEBUG nova.network.neutron [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1690.827935] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947347, 'name': CreateVM_Task, 'duration_secs': 0.612735} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.828397] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1690.828940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.829878] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.829878] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1690.830366] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e003a83-0a93-444b-961d-96943f6a7a74 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.837247] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1690.837247] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52380395-9160-cd7e-a529-aefa6149dbac" [ 1690.837247] env[62405]: _type = "Task" [ 1690.837247] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.849594] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52380395-9160-cd7e-a529-aefa6149dbac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.870704] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Releasing lock "refresh_cache-b4693268-4d12-4c96-a8f9-7b1bb9705c89" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.872048] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Received event network-changed-6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1690.872317] env[62405]: DEBUG nova.compute.manager [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Refreshing instance network info cache due to event network-changed-6afd5e2e-fe5f-4f25-a879-a25672a67740. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1690.872671] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Acquiring lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.872893] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Acquired lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.873404] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Refreshing network info cache for port 6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1690.894496] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1690.895308] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-014e89c5-f5cb-487b-9101-5e7aea313600 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.908937] env[62405]: DEBUG nova.network.neutron [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.908937] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1690.908937] env[62405]: value = "task-1947350" [ 1690.908937] env[62405]: _type = "Task" [ 1690.908937] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.922200] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947350, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.977831] env[62405]: DEBUG nova.scheduler.client.report [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1691.344501] env[62405]: DEBUG nova.network.neutron [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.353454] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52380395-9160-cd7e-a529-aefa6149dbac, 'name': SearchDatastore_Task, 'duration_secs': 0.023827} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.353678] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.353809] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1691.354153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.354310] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.354592] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.354892] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-314f561a-e82e-4d08-b2be-4fc18507b11a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.366171] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.366395] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1691.369425] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a24b699f-d522-4833-baa4-f4da5211c673 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.378382] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1691.378382] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52734975-68fb-1f27-e6aa-b25f37113cea" [ 1691.378382] env[62405]: _type = "Task" [ 1691.378382] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.392031] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52734975-68fb-1f27-e6aa-b25f37113cea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.411014] env[62405]: INFO nova.compute.manager [-] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Took 1.80 seconds to deallocate network for instance. [ 1691.428452] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947350, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.469462] env[62405]: DEBUG nova.compute.manager [req-949fb4df-8ac3-4910-b909-8d825a843adb req-37736dcc-fe0d-4aac-8be3-c25270703c88 service nova] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Received event network-vif-deleted-f6fffc80-6395-4f72-8a63-b037918502c8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1691.469725] env[62405]: DEBUG nova.compute.manager [req-949fb4df-8ac3-4910-b909-8d825a843adb req-37736dcc-fe0d-4aac-8be3-c25270703c88 service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Received event network-vif-deleted-e57e57ca-cb20-4bcb-bdee-5c96e246e949 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1691.469895] env[62405]: INFO nova.compute.manager [req-949fb4df-8ac3-4910-b909-8d825a843adb req-37736dcc-fe0d-4aac-8be3-c25270703c88 service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Neutron deleted interface e57e57ca-cb20-4bcb-bdee-5c96e246e949; detaching it from the instance and deleting it from the info cache [ 1691.470097] env[62405]: DEBUG nova.network.neutron [req-949fb4df-8ac3-4910-b909-8d825a843adb req-37736dcc-fe0d-4aac-8be3-c25270703c88 service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.474155] env[62405]: DEBUG nova.network.neutron [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.484486] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.373s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.487692] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 30.433s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.514697] env[62405]: INFO nova.scheduler.client.report [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Deleted allocations for instance c392d6f3-b638-4857-826d-760c38b7d291 [ 1691.556948] env[62405]: DEBUG nova.network.neutron [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.823329] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updated VIF entry in instance network info cache for port 
6afd5e2e-fe5f-4f25-a879-a25672a67740. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1691.823749] env[62405]: DEBUG nova.network.neutron [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updating instance_info_cache with network_info: [{"id": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "address": "fa:16:3e:31:fc:df", "network": {"id": "890b933d-5687-4c3b-aab8-4c8d68c71772", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-315909913-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "915d6ea5e5184efab9fbeda21e3b8a64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "399f3826-705c-45f7-9fe0-3a08a945151a", "external-id": "nsx-vlan-transportzone-936", "segmentation_id": 936, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6afd5e2e-fe", "ovs_interfaceid": "6afd5e2e-fe5f-4f25-a879-a25672a67740", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.898030] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52734975-68fb-1f27-e6aa-b25f37113cea, 'name': SearchDatastore_Task, 'duration_secs': 0.016764} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.898030] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b67f968c-da0a-4878-967a-d2ed865302a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.908228] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1691.908228] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6d642-47f5-b2f4-8d5b-eed4660bcae2" [ 1691.908228] env[62405]: _type = "Task" [ 1691.908228] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.923405] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.924091] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6d642-47f5-b2f4-8d5b-eed4660bcae2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.927751] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947350, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.974934] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-675f2c95-65f8-415d-a118-3cbc882edc3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.979159] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Releasing lock "refresh_cache-0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.979159] env[62405]: DEBUG nova.compute.manager [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1691.979159] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.979512] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3df4c2-8db2-4ed6-ac85-25c6178e3799 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.988795] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1691.990847] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01685da6-76b7-4465-8bdf-d79061777075 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.995747] env[62405]: INFO nova.compute.claims [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1692.001198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010d2ea0-d817-4ce3-b4c9-6c4aa1c748ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.022644] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1692.022644] env[62405]: value = "task-1947351" [ 1692.022644] env[62405]: _type = "Task" [ 1692.022644] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.028991] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb04ba5e-6e45-4d29-b34e-8621c99d2b06 tempest-InstanceActionsTestJSON-2109960104 tempest-InstanceActionsTestJSON-2109960104-project-member] Lock "c392d6f3-b638-4857-826d-760c38b7d291" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.576s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.043059] env[62405]: DEBUG nova.compute.manager [req-949fb4df-8ac3-4910-b909-8d825a843adb req-37736dcc-fe0d-4aac-8be3-c25270703c88 service nova] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Detach interface failed, port_id=e57e57ca-cb20-4bcb-bdee-5c96e246e949, reason: Instance 377365a4-7538-4bab-a181-1940e6fb4066 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1692.047786] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947351, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.060461] env[62405]: INFO nova.compute.manager [-] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Took 1.58 seconds to deallocate network for instance. [ 1692.188120] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.188550] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.327893] env[62405]: DEBUG oslo_concurrency.lockutils [req-8d300c6a-5a2d-4890-9d2a-a22b26b85203 req-f8200e1c-2456-4d6d-b0b9-7018d68d1b32 service nova] Releasing lock "refresh_cache-34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.428061] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947350, 'name': CloneVM_Task, 'duration_secs': 1.5045} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.433089] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Created linked-clone VM from snapshot [ 1692.433740] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e6d642-47f5-b2f4-8d5b-eed4660bcae2, 'name': SearchDatastore_Task, 'duration_secs': 0.024147} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.434649] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3d5eb1-fdcd-4ffa-8ebc-b5ec49dbcbcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.439055] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.439714] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b4693268-4d12-4c96-a8f9-7b1bb9705c89/b4693268-4d12-4c96-a8f9-7b1bb9705c89.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1692.442558] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02849a30-6d0e-492c-8a02-128dbcf548e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.451209] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Uploading image c12df1d1-31b2-4713-95a6-8fa9cde64dc8 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1692.456780] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1692.456780] env[62405]: value = "task-1947352" [ 1692.456780] env[62405]: _type = "Task" [ 1692.456780] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.470133] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.488819] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1692.488819] env[62405]: value = "vm-401452" [ 1692.488819] env[62405]: _type = "VirtualMachine" [ 1692.488819] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1692.488819] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7de8aa53-f95e-4b8c-bc55-ca1f1e96c943 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.499218] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lease: (returnval){ [ 1692.499218] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8777e-787d-c1fd-d559-9da2f8f2e180" [ 1692.499218] env[62405]: _type = "HttpNfcLease" [ 1692.499218] env[62405]: } obtained for exporting VM: (result){ [ 1692.499218] env[62405]: value = "vm-401452" [ 1692.499218] env[62405]: _type = "VirtualMachine" [ 1692.499218] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1692.499218] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the lease: (returnval){ [ 1692.499218] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8777e-787d-c1fd-d559-9da2f8f2e180" [ 1692.499218] env[62405]: _type = "HttpNfcLease" [ 1692.499218] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1692.508086] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1692.508086] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8777e-787d-c1fd-d559-9da2f8f2e180" [ 1692.508086] env[62405]: _type = "HttpNfcLease" [ 1692.508086] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1692.517962] env[62405]: INFO nova.compute.resource_tracker [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating resource usage from migration 2e9f9f5c-75f9-4fbb-a793-3dac9f3417c4 [ 1692.534898] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947351, 'name': PowerOffVM_Task, 'duration_secs': 0.138487} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.535343] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1692.536021] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1692.536351] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a31020b2-37c8-444a-8467-ac8c82ef2170 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.577584] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.577584] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.577584] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.577584] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleting the datastore file [datastore1] 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.577584] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cea1c54a-37d2-4b3b-9af8-02f3d2cbf942 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.590192] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for the task: (returnval){ [ 1692.590192] env[62405]: value = "task-1947355" [ 1692.590192] env[62405]: _type = "Task" [ 1692.590192] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.601370] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.643141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "9b21fa71-8a0e-446a-9492-59e2b068237c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.643304] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.690987] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1692.970567] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947352, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.019206] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1693.019206] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8777e-787d-c1fd-d559-9da2f8f2e180" [ 1693.019206] env[62405]: _type = "HttpNfcLease" [ 1693.019206] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1693.019618] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1693.019618] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c8777e-787d-c1fd-d559-9da2f8f2e180" [ 1693.019618] env[62405]: _type = "HttpNfcLease" [ 1693.019618] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1693.020451] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37706f3c-d0d2-4507-92a7-ecba08e88165 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.030429] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1693.030522] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1693.110974] env[62405]: DEBUG oslo_vmware.api [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Task: {'id': task-1947355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.424283} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.111255] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1693.111448] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1693.111627] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1693.111800] env[62405]: INFO nova.compute.manager [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1693.112525] env[62405]: DEBUG oslo.service.loopingcall [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.112525] env[62405]: DEBUG nova.compute.manager [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1693.112525] env[62405]: DEBUG nova.network.neutron [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1693.149956] env[62405]: DEBUG nova.network.neutron [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1693.155790] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84015cd5-55da-40ee-9810-f7ce34d1ab9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.164830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a327fa-210d-4812-81e2-1445fef4a789 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.172256] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-82327007-d9ff-4784-802d-267589cd7558 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.209916] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070f3db0-99d8-4abd-a6d3-febf6924ab0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.219557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf99633-db5e-4511-b273-0b505b69931f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.238080] env[62405]: DEBUG nova.compute.provider_tree [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1693.247224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.470922] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947352, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.623293} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.471290] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b4693268-4d12-4c96-a8f9-7b1bb9705c89/b4693268-4d12-4c96-a8f9-7b1bb9705c89.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1693.471534] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1693.471844] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5eb8beae-9aba-40d3-a3c8-168ec8238c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.485970] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1693.485970] env[62405]: value = "task-1947356" [ 1693.485970] env[62405]: _type = "Task" [ 1693.485970] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.501028] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947356, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.652882] env[62405]: DEBUG nova.network.neutron [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.779745] env[62405]: ERROR nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [req-edde1181-9507-4267-896d-80ffd0759c0c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-edde1181-9507-4267-896d-80ffd0759c0c"}]} [ 1693.806166] env[62405]: DEBUG nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1693.830799] env[62405]: DEBUG nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1693.832529] env[62405]: DEBUG nova.compute.provider_tree [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1693.857894] env[62405]: DEBUG nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1693.892626] env[62405]: DEBUG nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1693.999340] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947356, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08433} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.003193] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1694.005827] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6350cbc-c41a-42c3-8be5-19d3be9b0eae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.036237] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] b4693268-4d12-4c96-a8f9-7b1bb9705c89/b4693268-4d12-4c96-a8f9-7b1bb9705c89.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1694.041250] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18899845-4ad2-424d-bed1-3468fbff896f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.066407] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1694.066407] env[62405]: value = "task-1947357" [ 1694.066407] env[62405]: _type = "Task" [ 1694.066407] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.076689] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.157230] env[62405]: INFO nova.compute.manager [-] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Took 1.04 seconds to deallocate network for instance. [ 1694.580706] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f743d8bc-8b5c-4a4d-96f3-7a62d2b55659 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.587456] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947357, 'name': ReconfigVM_Task, 'duration_secs': 0.500714} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.588744] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Reconfigured VM instance instance-00000035 to attach disk [datastore1] b4693268-4d12-4c96-a8f9-7b1bb9705c89/b4693268-4d12-4c96-a8f9-7b1bb9705c89.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1694.589603] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-896bb388-3723-4dfa-bbcc-0098e03a9070 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.597749] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556fe9f1-258b-464c-aad0-f9f295b91671 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.647639] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1694.647639] env[62405]: value = "task-1947358" [ 1694.647639] env[62405]: _type = "Task" [ 1694.647639] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.647639] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22255acd-639c-46f0-bb9b-619c13dea950 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.647639] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947358, 'name': Rename_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.652542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4044c0-f912-4039-a1e7-70ff2f7cda76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.671097] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.671952] env[62405]: DEBUG nova.compute.provider_tree [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1695.117276] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947358, 'name': Rename_Task, 'duration_secs': 0.216018} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.117698] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.118718] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfd50a62-8b5d-4c9b-8a70-c1d143c33d1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.126827] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1695.126827] env[62405]: value = "task-1947359" [ 1695.126827] env[62405]: _type = "Task" [ 1695.126827] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.136466] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947359, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.210848] env[62405]: DEBUG nova.scheduler.client.report [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1695.211209] env[62405]: DEBUG nova.compute.provider_tree [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 89 to 90 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1695.211434] env[62405]: DEBUG nova.compute.provider_tree [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1695.638393] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947359, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.719136] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 4.232s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.719460] env[62405]: INFO nova.compute.manager [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Migrating [ 1695.728556] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.370s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.728920] env[62405]: DEBUG nova.objects.instance [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lazy-loading 'resources' on Instance uuid 0491dc4b-cf35-4035-aca9-baf43b86af7e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1696.139897] env[62405]: DEBUG oslo_vmware.api [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947359, 'name': PowerOnVM_Task, 'duration_secs': 0.637492} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.140218] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.140433] env[62405]: INFO nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Took 10.52 seconds to spawn the instance on the hypervisor. 
[ 1696.140616] env[62405]: DEBUG nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1696.141628] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0481864e-94a6-46b4-8039-64656cdedf40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.243866] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.244116] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.244372] env[62405]: DEBUG nova.network.neutron [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1696.666413] env[62405]: INFO nova.compute.manager [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Took 49.37 seconds to build instance. 
[ 1696.718213] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea398e78-488e-4edb-9ce2-28ee5aa096b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.732031] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0049cb77-5cbd-4d4c-ab76-f7b40ef0f299 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.766287] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202e5ad8-4b52-473f-b584-be9269d5beaf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.777475] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabe36c9-3c91-4a56-a5e1-01ab462d1495 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.794883] env[62405]: DEBUG nova.compute.provider_tree [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.145994] env[62405]: DEBUG nova.network.neutron [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.168367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c3597d46-3478-4c06-91cd-9903a57782b5 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.877s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.299302] env[62405]: DEBUG 
nova.scheduler.client.report [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1697.649467] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.671208] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1697.787504] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1697.788496] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2782da-ff3d-4d3c-a80c-605ddd16b212 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.797680] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1697.799491] env[62405]: ERROR oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk due to incomplete transfer. 
[ 1697.799491] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7e3ea511-3008-4d53-950c-b67129e86bae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.805272] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.809658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.506s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.810660] env[62405]: DEBUG nova.objects.instance [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lazy-loading 'resources' on Instance uuid f8c6f99f-499f-4886-aae9-5f08969175f6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1697.811379] env[62405]: DEBUG oslo_vmware.rw_handles [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226f612-f208-7633-6e84-baf3b7a57d3e/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1697.811555] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploaded image 6ef894cb-7aec-49b0-9d6b-4b554296fb09 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1697.814642] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1697.815371] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5bd329ff-334c-4056-8aaa-c8595e924472 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.824250] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1697.824250] env[62405]: value = "task-1947360" [ 1697.824250] env[62405]: _type = "Task" [ 1697.824250] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.835562] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947360, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.837562] env[62405]: INFO nova.scheduler.client.report [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Deleted allocations for instance 0491dc4b-cf35-4035-aca9-baf43b86af7e [ 1698.197307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.337109] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947360, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.350583] env[62405]: DEBUG oslo_concurrency.lockutils [None req-543444ba-0ef2-4f11-a0fe-b1789c5f199c tempest-SecurityGroupsTestJSON-1573923907 tempest-SecurityGroupsTestJSON-1573923907-project-member] Lock "0491dc4b-cf35-4035-aca9-baf43b86af7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.466s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.839216] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947360, 'name': Destroy_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.851052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b9f2b4-f5a4-47b3-a69f-5039cc6bb984 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.858779] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "742c8d94-48d1-4408-91dc-98f25661aa8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.859479] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.864141] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f8cbae-3ce9-4c7e-934d-55eaafbbc698 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.904117] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a013017-c07b-4bca-9f2a-ade54d019dd5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.912513] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6912ba92-ac54-49b4-b592-d5952b243c60 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.931038] env[62405]: DEBUG nova.compute.provider_tree [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.169885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7e2c58-033e-42f6-8f0e-885d886edda3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.192638] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1699.338813] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947360, 'name': Destroy_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.434870] env[62405]: DEBUG nova.scheduler.client.report [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1699.701496] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1699.701496] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06bace1d-7f77-479d-9958-87737c6f5f4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.718025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.718025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.718025] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1699.718025] env[62405]: value = "task-1947361" [ 1699.718025] env[62405]: _type = "Task" [ 1699.718025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.728678] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947361, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.796026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.796026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.796026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.796026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.796026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.798707] env[62405]: INFO nova.compute.manager [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Terminating instance [ 1699.840309] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947360, 'name': Destroy_Task, 'duration_secs': 1.593916} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.841186] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroyed the VM [ 1699.841690] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1699.842075] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5646a409-bce1-4f7d-a585-4c37f5d30415 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.854279] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1699.854279] env[62405]: value = "task-1947362" [ 1699.854279] env[62405]: _type = "Task" [ 1699.854279] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.863038] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.940867] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.945040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.634s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.945538] env[62405]: INFO nova.compute.claims [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1699.987727] env[62405]: INFO nova.scheduler.client.report [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Deleted allocations for instance f8c6f99f-499f-4886-aae9-5f08969175f6 [ 1700.231308] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947361, 'name': 
PowerOffVM_Task, 'duration_secs': 0.238854} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.231570] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.231752] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1700.304392] env[62405]: DEBUG nova.compute.manager [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1700.304598] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.305621] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceb7884-6d49-435f-9421-17889e9cb8db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.316616] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.316891] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa9f87c5-4307-4088-8070-82ae3a1a17f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.324512] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1700.324512] env[62405]: value = "task-1947363" [ 1700.324512] env[62405]: _type = "Task" [ 1700.324512] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.333901] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947363, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.363800] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947362, 'name': RemoveSnapshot_Task} progress is 16%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.502059] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2295ed20-ced6-48e8-b941-a33998d2c4ca tempest-ServersTestManualDisk-2027945359 tempest-ServersTestManualDisk-2027945359-project-member] Lock "f8c6f99f-499f-4886-aae9-5f08969175f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.666s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.515202] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1700.516675] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd950148-6df7-4cf5-a33b-785fd4a5fc80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.526889] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1700.527088] env[62405]: ERROR oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk due to incomplete transfer. [ 1700.527336] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0ddd9b34-9701-4882-a114-34c5bcd63ad6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.539292] env[62405]: DEBUG oslo_vmware.rw_handles [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526afd7f-870c-1ba3-6b45-690b4657032e/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1700.539292] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Uploaded image c12df1d1-31b2-4713-95a6-8fa9cde64dc8 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1700.540512] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1700.541012] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-37d9ba4c-6131-4391-b740-3dbc483c9e5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.548886] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1700.548886] env[62405]: value = "task-1947364" [ 1700.548886] env[62405]: _type = "Task" [ 1700.548886] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.561046] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947364, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.740312] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1700.740558] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.740728] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1700.740913] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.741069] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1700.741220] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1700.741422] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1700.741580] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1700.741742] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1700.741904] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1700.742872] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1700.749440] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0fd408a-d9bb-4c52-b746-528d07aad6dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.773389] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1700.773389] env[62405]: value = "task-1947365" [ 1700.773389] env[62405]: _type = "Task" [ 1700.773389] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.782877] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947365, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.834873] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947363, 'name': PowerOffVM_Task, 'duration_secs': 0.229428} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.835218] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.835460] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.835775] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec708460-6380-4782-b73f-c8f562dc6f75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.864169] env[62405]: DEBUG oslo_vmware.api [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947362, 'name': RemoveSnapshot_Task, 'duration_secs': 0.832498} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.864517] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1700.864683] env[62405]: INFO nova.compute.manager [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 16.60 seconds to snapshot the instance on the hypervisor. 
[ 1701.034333] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1701.034608] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1701.034806] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Deleting the datastore file [datastore1] b4693268-4d12-4c96-a8f9-7b1bb9705c89 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1701.035227] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f9a9006-d6ce-48da-ac4a-cbab25e7bd3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.043514] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for the task: (returnval){ [ 1701.043514] env[62405]: value = "task-1947367" [ 1701.043514] env[62405]: _type = "Task" [ 1701.043514] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.061260] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.067729] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947364, 'name': Destroy_Task, 'duration_secs': 0.418562} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.067995] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Destroyed the VM [ 1701.069278] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1701.069278] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4cd4b9c5-899b-40e1-b0d5-b6a5e55f00b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.079460] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1701.079460] env[62405]: value = "task-1947368" [ 1701.079460] env[62405]: _type = "Task" [ 1701.079460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.091359] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947368, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.290134] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947365, 'name': ReconfigVM_Task, 'duration_secs': 0.259681} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.290460] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1701.444427] env[62405]: DEBUG nova.compute.manager [None req-27b60465-7ba5-442f-bd81-ad53bbfe7350 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Found 1 images (rotation: 2) {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1701.516015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cd665c-0128-4b4b-b401-06147bd9d370 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.524819] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ae4207-6403-4378-8272-86f2b29403a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.562222] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1135b9b0-9305-4f74-9401-cc9c87e9d54b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.570876] env[62405]: DEBUG oslo_vmware.api [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Task: {'id': task-1947367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190888} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.573060] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.573320] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.573545] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.573760] env[62405]: INFO nova.compute.manager [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1701.574052] env[62405]: DEBUG oslo.service.loopingcall [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.574346] env[62405]: DEBUG nova.compute.manager [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1701.574478] env[62405]: DEBUG nova.network.neutron [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1701.577047] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad329e20-a19b-44f7-8d20-0cab96439f7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.596655] env[62405]: DEBUG nova.compute.provider_tree [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.598122] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947368, 'name': RemoveSnapshot_Task, 'duration_secs': 0.390189} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.598725] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1701.599053] env[62405]: DEBUG nova.compute.manager [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1701.599818] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b5681b-792c-4179-8346-e2db2657fdf1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.801747] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:23:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='dc0b8b8d-2143-43d6-88ba-cc2419f1681a',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1321132944',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1701.801998] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.802175] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1701.802359] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.802532] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1701.802655] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1701.802856] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1701.803119] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1701.803854] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1701.803854] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1701.803854] env[62405]: DEBUG nova.virt.hardware [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1701.809371] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfiguring VM instance instance-0000002d to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1701.809469] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1eeff75f-3eec-4b40-8187-fe539459ea7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.832850] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1701.832850] env[62405]: value = "task-1947369" [ 1701.832850] env[62405]: _type = "Task" [ 1701.832850] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.841720] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947369, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.101484] env[62405]: DEBUG nova.scheduler.client.report [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1702.114243] env[62405]: INFO nova.compute.manager [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Shelve offloading [ 1702.139021] env[62405]: DEBUG nova.compute.manager [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1702.139021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150dbae5-16a8-4e40-a7ae-e2aac68fd4d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.214067] env[62405]: DEBUG nova.compute.manager [req-c853e617-09bc-4b34-a2d8-ec8dc2fb3f48 req-4739d799-c34a-4d1d-82e2-aa2aae135401 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Received event network-vif-deleted-a6d143aa-0ba8-425d-888b-e637ee77db16 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1702.214302] env[62405]: INFO nova.compute.manager [req-c853e617-09bc-4b34-a2d8-ec8dc2fb3f48 req-4739d799-c34a-4d1d-82e2-aa2aae135401 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Neutron deleted interface a6d143aa-0ba8-425d-888b-e637ee77db16; detaching it from the instance and deleting it from the info cache [ 1702.214480] env[62405]: DEBUG nova.network.neutron [req-c853e617-09bc-4b34-a2d8-ec8dc2fb3f48 req-4739d799-c34a-4d1d-82e2-aa2aae135401 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.345836] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947369, 'name': ReconfigVM_Task, 'duration_secs': 0.180393} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.346429] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfigured VM instance instance-0000002d to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1702.347390] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ca811a-5aa0-466c-ad7a-3dd5660801d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.379020] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1702.379020] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-835cfd07-f5b3-4f2c-8c32-0f6b10642b00 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.401849] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1702.401849] env[62405]: value = "task-1947370" [ 1702.401849] env[62405]: _type = "Task" [ 1702.401849] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.413451] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.519017] env[62405]: DEBUG nova.network.neutron [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.609036] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.609036] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1702.610383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.328s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.618161] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.618514] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c8a8e16-2755-410e-b908-684191176bfd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.628182] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1702.628182] env[62405]: value = "task-1947371" [ 1702.628182] env[62405]: _type = "Task" [ 1702.628182] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.638778] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.653035] env[62405]: INFO nova.compute.manager [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] instance snapshotting [ 1702.653035] env[62405]: DEBUG nova.objects.instance [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.720052] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b2b5463-ebc8-4308-a2b3-98925d5458a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.731467] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26d792a-86bc-46cd-8d4c-33f48e370484 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.768923] env[62405]: DEBUG nova.compute.manager [req-c853e617-09bc-4b34-a2d8-ec8dc2fb3f48 req-4739d799-c34a-4d1d-82e2-aa2aae135401 service nova] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Detach interface failed, port_id=a6d143aa-0ba8-425d-888b-e637ee77db16, reason: Instance b4693268-4d12-4c96-a8f9-7b1bb9705c89 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1702.916387] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947370, 'name': ReconfigVM_Task, 'duration_secs': 0.276591} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.916387] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1702.916672] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1703.019872] env[62405]: INFO nova.compute.manager [-] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Took 1.45 seconds to deallocate network for instance. [ 1703.108888] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f6dada-4f03-4427-ad95-eb6f53af0a1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.117591] env[62405]: DEBUG nova.compute.utils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1703.118751] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1703.119364] env[62405]: DEBUG nova.network.neutron [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1703.129727] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f8055c-5942-4e87-bb26-90df1530638b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.171145] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1703.171496] env[62405]: DEBUG nova.compute.manager [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1703.175038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edf6f7f-b4d9-4ab4-a4e2-64547efd4127 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.177912] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fcabd5-c3b5-446f-bee5-0b49bec639fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.182893] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98cfd64-1181-4bfd-ae91-94fa04ebcf1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.187235] env[62405]: DEBUG nova.policy [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c9cac2fdc8246fd9bc4664cf94d1952', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18b4edb74b5d4f7a95565aebf78c444f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1703.211627] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ad1fad-4016-4e4e-9e03-0015628786db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.216480] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.216657] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.216834] env[62405]: DEBUG nova.network.neutron [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.218505] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ed33cb-187c-439d-8937-884e8f512435 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.238931] env[62405]: DEBUG nova.compute.provider_tree [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.424024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded8ec05-73dd-435f-9a9b-30763000cb30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.447301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37291cca-b78c-4c73-8204-ae5367485109 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.467213] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1703.529994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.562930] env[62405]: DEBUG nova.network.neutron [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Successfully created port: c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1703.623447] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Start 
building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1703.742434] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1703.742736] env[62405]: DEBUG nova.scheduler.client.report [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1703.745965] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b9e9432d-9fdf-4952-9861-df3d52f7ecc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.758786] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1703.758786] env[62405]: value = "task-1947372" [ 1703.758786] env[62405]: _type = "Task" [ 1703.758786] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.769838] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947372, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.034191] env[62405]: DEBUG nova.network.neutron [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updating instance_info_cache with network_info: [{"id": "262764f7-a6da-4d37-a804-a4b6719d4a50", "address": "fa:16:3e:61:bc:57", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262764f7-a6", "ovs_interfaceid": "262764f7-a6da-4d37-a804-a4b6719d4a50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.095033] env[62405]: DEBUG nova.network.neutron [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Port 7fbae16c-e943-4752-8a7e-92bdea130e1a binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1704.254028] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.254028] env[62405]: INFO nova.compute.manager [None req-b04e17b9-76b5-488f-8a85-8ffe1da4ec56 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Successfully reverted task state from rebuilding on failure for instance. 
[ 1704.261256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.201s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.263814] env[62405]: INFO nova.compute.claims [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1704.279794] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947372, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.539547] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.638967] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1704.665596] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1704.667832] 
env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1704.667832] env[62405]: DEBUG nova.virt.hardware [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1704.668741] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d745c145-639b-471d-a6b5-8e2f4b3bf19b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.677949] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c23f395-3608-45e9-a1ac-215b123b39cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.751841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.752088] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.782491] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947372, 'name': CreateSnapshot_Task, 'duration_secs': 0.548055} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.782491] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1704.783701] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9628c3-6c68-46d8-909d-d260976522b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.118637] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.118982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.119134] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.235053] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1705.236172] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d61e9e-7aef-4afa-b125-1f4c37a3b570 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.244533] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1705.244797] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03895fbd-997c-49ff-a4ec-aa29a74d48db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.260087] env[62405]: DEBUG nova.network.neutron [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 
tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Successfully updated port: c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1705.306405] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1705.306887] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3899166e-4922-4f68-a6da-09f011fce3e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.316158] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1705.316158] env[62405]: value = "task-1947374" [ 1705.316158] env[62405]: _type = "Task" [ 1705.316158] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.327745] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947374, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.449646] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.450263] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.567077] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1705.567296] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1705.567472] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 
tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] 6c6a3974-c87e-47ed-a025-d6221a8decd7 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1705.567748] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efa7c58a-8874-44d4-a1c4-ea8723abc333 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.576777] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1705.576777] env[62405]: value = "task-1947375" [ 1705.576777] env[62405]: _type = "Task" [ 1705.576777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.589424] env[62405]: DEBUG nova.compute.manager [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Received event network-vif-plugged-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1705.589686] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] Acquiring lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.589933] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] Lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.590914] env[62405]: DEBUG oslo_concurrency.lockutils [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] Lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.590914] env[62405]: DEBUG nova.compute.manager [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] No waiting events found dispatching network-vif-plugged-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1705.590914] env[62405]: WARNING nova.compute.manager [req-cd907494-4063-4da5-963c-90cd3fb06f1f req-68dde6e2-c028-4154-8f79-3d52687806f3 service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Received unexpected event network-vif-plugged-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd for instance with vm_state building and task_state spawning. 
[ 1705.594215] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.602488] env[62405]: DEBUG nova.compute.manager [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received event network-vif-unplugged-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1705.602688] env[62405]: DEBUG oslo_concurrency.lockutils [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.602887] env[62405]: DEBUG oslo_concurrency.lockutils [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.603060] env[62405]: DEBUG oslo_concurrency.lockutils [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.603256] env[62405]: DEBUG nova.compute.manager [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] No waiting events found dispatching network-vif-unplugged-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1705.603432] env[62405]: WARNING nova.compute.manager [req-552398c5-2e1c-4248-b6fd-0de1477f5def req-2036a011-f237-4eec-bdf5-cd8a152065cc service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received unexpected event network-vif-unplugged-262764f7-a6da-4d37-a804-a4b6719d4a50 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1705.766962] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.767124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.767322] env[62405]: DEBUG nova.network.neutron [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1705.791855] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce592bee-8ce8-427c-a945-e23a66ffc92f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.805548] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3bc8c7-9de7-4f3c-8549-c5dc60ab226e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.845719] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af88d22-1031-4b41-a18a-00a82a871e5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.857699] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947374, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.859124] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942debae-d8d1-426c-b228-f86085c86367 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.878533] env[62405]: DEBUG nova.compute.provider_tree [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1706.089423] env[62405]: DEBUG oslo_vmware.api [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13966} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.089860] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1706.090569] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1706.090569] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1706.125118] env[62405]: INFO nova.scheduler.client.report [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance 6c6a3974-c87e-47ed-a025-d6221a8decd7 [ 1706.185660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1706.185854] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.186154] env[62405]: DEBUG nova.network.neutron [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1706.257461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "48554024-9b6f-44be-b21e-615b25cd790c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.257946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.315610] env[62405]: DEBUG nova.network.neutron [None 
req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1706.354714] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947374, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.382839] env[62405]: DEBUG nova.scheduler.client.report [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1706.488592] env[62405]: DEBUG nova.network.neutron [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Updating instance_info_cache with network_info: [{"id": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "address": "fa:16:3e:8b:df:d2", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f0f942-0f", "ovs_interfaceid": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.627798] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.864634] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947374, 'name': CloneVM_Task, 'duration_secs': 1.187162} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.866090] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created linked-clone VM from snapshot [ 1706.866090] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe276e48-5e6a-46de-b526-761d310c30eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.875887] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploading image d7233774-277c-4c93-9db4-49320793fa07 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1706.888664] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.627s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.889296] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1706.892334] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.187s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.895250] env[62405]: INFO nova.compute.claims [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.914658] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1706.914658] env[62405]: value = "vm-401454" [ 1706.914658] env[62405]: _type = "VirtualMachine" [ 1706.914658] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1706.914901] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-026ddaa4-4887-4959-8103-a366b2ab99de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.924670] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease: (returnval){ [ 1706.924670] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222b751-0c22-4063-1e08-b97af797add3" [ 1706.924670] env[62405]: _type = "HttpNfcLease" [ 1706.924670] env[62405]: } obtained for exporting VM: (result){ [ 1706.924670] env[62405]: value = "vm-401454" [ 1706.924670] env[62405]: _type = "VirtualMachine" [ 1706.924670] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1706.924670] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the lease: (returnval){ [ 1706.924670] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222b751-0c22-4063-1e08-b97af797add3" [ 1706.924670] env[62405]: _type = "HttpNfcLease" [ 1706.924670] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1706.934041] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1706.934041] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222b751-0c22-4063-1e08-b97af797add3" [ 1706.934041] env[62405]: _type = "HttpNfcLease" [ 1706.934041] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1706.989751] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.989939] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Instance network_info: |[{"id": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "address": "fa:16:3e:8b:df:d2", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f0f942-0f", "ovs_interfaceid": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1706.990375] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:df:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd8383707-f093-40a7-a5ba-31b0e07cac45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2f0f942-0f3a-45ee-9b01-295f9c3a79cd', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1707.000240] env[62405]: DEBUG oslo.service.loopingcall [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1707.000726] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1707.001093] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ba54b84-36f1-4fba-9c7c-6704b7acc56c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.027017] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1707.027017] env[62405]: value = "task-1947377" [ 1707.027017] env[62405]: _type = "Task" [ 1707.027017] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.035813] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947377, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.051437] env[62405]: DEBUG nova.network.neutron [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.404556] env[62405]: DEBUG nova.compute.utils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.405976] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1707.406162] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1707.436088] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1707.436088] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222b751-0c22-4063-1e08-b97af797add3" [ 1707.436088] env[62405]: _type = "HttpNfcLease" [ 1707.436088] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1707.436570] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1707.436570] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5222b751-0c22-4063-1e08-b97af797add3" [ 1707.436570] env[62405]: _type = "HttpNfcLease" [ 1707.436570] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1707.437361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97a6848-bed9-441e-a52f-d7fa3c4202ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.446830] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1707.447065] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk for reading. 
{{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1707.505862] env[62405]: DEBUG nova.policy [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96a739701a824313b30b0d214f43757b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6014bab6bc9a4b059bab88e44b31f446', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1707.537836] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4120931-a4f3-4e29-84c9-3229605e898b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.539753] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947377, 'name': CreateVM_Task, 'duration_secs': 0.362718} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.542456] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1707.543284] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.543463] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.543772] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1707.544298] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38596f49-88ca-4c62-8e01-981cb2cfa46d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.552236] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1707.552236] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52389b20-af78-a169-210d-eb5bd51a5943" [ 1707.552236] env[62405]: _type = "Task" [ 1707.552236] env[62405]: } to 
complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.558348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.571614] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52389b20-af78-a169-210d-eb5bd51a5943, 'name': SearchDatastore_Task, 'duration_secs': 0.010306} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.576731] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.576968] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1707.577208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.577365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.577537] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1707.578029] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-201fc924-fe47-4974-960b-9c0c20f0fbd9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.587170] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1707.587343] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1707.588048] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ebf6be3-0796-44e0-9287-5a57b64166bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.593735] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1707.593735] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524cabf4-08d8-bdeb-16a4-bce6f43024d4" [ 1707.593735] env[62405]: _type = "Task" [ 1707.593735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.602899] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524cabf4-08d8-bdeb-16a4-bce6f43024d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.807981] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Successfully created port: 3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1707.862723] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.914140] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1708.092275] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06434d5-ff68-4683-b128-fa9796b647f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.113173] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524cabf4-08d8-bdeb-16a4-bce6f43024d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009503} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.133041] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b3ea136-3354-4bd5-8640-d0aeecca774f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.133629] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de29437-9bb0-4b3c-8a89-437f01ece7bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.146384] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1708.160251] env[62405]: DEBUG nova.compute.manager [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Received event network-changed-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1708.160251] env[62405]: DEBUG nova.compute.manager [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Refreshing instance network info cache due to event network-changed-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1708.160251] env[62405]: DEBUG oslo_concurrency.lockutils [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] Acquiring lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.160251] env[62405]: DEBUG oslo_concurrency.lockutils [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] Acquired lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.160251] env[62405]: DEBUG nova.network.neutron [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Refreshing network info cache for port c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1708.165229] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1708.165229] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd0017-20f6-2a55-8c61-1da7f1f54cb6" [ 1708.165229] env[62405]: _type = "Task" [ 1708.165229] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.174182] env[62405]: DEBUG nova.compute.manager [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Received event network-changed-262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1708.174182] env[62405]: DEBUG nova.compute.manager [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Refreshing instance network info cache due to event network-changed-262764f7-a6da-4d37-a804-a4b6719d4a50. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1708.174182] env[62405]: DEBUG oslo_concurrency.lockutils [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] Acquiring lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.174182] env[62405]: DEBUG oslo_concurrency.lockutils [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] Acquired lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.174530] env[62405]: DEBUG nova.network.neutron [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Refreshing network info cache for port 262764f7-a6da-4d37-a804-a4b6719d4a50 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1708.185054] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd0017-20f6-2a55-8c61-1da7f1f54cb6, 'name': SearchDatastore_Task, 'duration_secs': 0.010578} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.186856] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.187239] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] aae3abca-951a-4149-9ccb-d70bea218aea/aae3abca-951a-4149-9ccb-d70bea218aea.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1708.187509] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0be6d48e-1aa9-4ab0-84cf-dec4d9979279 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.198323] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1708.198323] env[62405]: value = "task-1947378" [ 1708.198323] env[62405]: _type = "Task" [ 1708.198323] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.210892] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947378, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.625473] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7f8b0a-c026-45d9-8f7e-c3c32bb12e32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.634500] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3983ec9b-8296-4804-906e-d48100ef51fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.667499] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1708.670348] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42685bf3-d10d-41cd-bd07-aee9688a422d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.672667] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b866bf-26dc-4ff9-b638-44545cc2f4d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.683584] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2956bf8e-5068-422f-b37e-9b22c27a4383 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.689423] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1708.689423] env[62405]: value = "task-1947379" [ 1708.689423] env[62405]: _type = "Task" [ 1708.689423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.702408] env[62405]: DEBUG nova.compute.provider_tree [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.710736] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947379, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.723168] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519451} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.723421] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] aae3abca-951a-4149-9ccb-d70bea218aea/aae3abca-951a-4149-9ccb-d70bea218aea.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1708.723782] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1708.725029] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f8585bf-293a-4a18-acaf-87e212c785a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.736470] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1708.736470] env[62405]: value = "task-1947380" [ 1708.736470] env[62405]: _type = "Task" [ 1708.736470] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.748817] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947380, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.919170] env[62405]: DEBUG nova.network.neutron [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updated VIF entry in instance network info cache for port 262764f7-a6da-4d37-a804-a4b6719d4a50. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.919170] env[62405]: DEBUG nova.network.neutron [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updating instance_info_cache with network_info: [{"id": "262764f7-a6da-4d37-a804-a4b6719d4a50", "address": "fa:16:3e:61:bc:57", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": null, "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap262764f7-a6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.932220] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1708.973200] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1708.974408] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.974408] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1708.974408] env[62405]: DEBUG nova.virt.hardware [None 
req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.974408] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1708.974918] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1708.975314] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1708.975710] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1708.975993] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1708.976295] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1708.976601] env[62405]: DEBUG nova.virt.hardware [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1708.978245] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a12b0e-5d9a-4e45-a146-355829e4d0ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.989179] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd0821d-c86e-40e9-957a-513a276fb8e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.996661] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.997110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.092405] env[62405]: DEBUG nova.network.neutron [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Updated VIF entry in instance network info cache for port c2f0f942-0f3a-45ee-9b01-295f9c3a79cd. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1709.092834] env[62405]: DEBUG nova.network.neutron [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Updating instance_info_cache with network_info: [{"id": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "address": "fa:16:3e:8b:df:d2", "network": {"id": "5f90763f-2c20-4d8a-9274-7e692071a6cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1386170130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b4edb74b5d4f7a95565aebf78c444f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d8383707-f093-40a7-a5ba-31b0e07cac45", "external-id": "cl2-zone-18", "segmentation_id": 18, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f0f942-0f", "ovs_interfaceid": "c2f0f942-0f3a-45ee-9b01-295f9c3a79cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1709.201386] env[62405]: DEBUG oslo_vmware.api [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947379, 'name': PowerOnVM_Task, 'duration_secs': 0.41582} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.201769] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1709.202010] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1b8076-ff38-43f7-a3f4-0dbc196ee36a tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance '3c9487ff-2092-4cde-82d5-b38e5bc5c6e3' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1709.212562] env[62405]: DEBUG nova.scheduler.client.report [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1709.246593] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107561} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.246974] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1709.247908] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b2d9ec-fa9c-4ca2-a69a-8c81040082ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.271216] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] aae3abca-951a-4149-9ccb-d70bea218aea/aae3abca-951a-4149-9ccb-d70bea218aea.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1709.273515] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca843211-1147-4d19-8548-54b9ea5e42ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.295674] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1709.295674] env[62405]: value = "task-1947381" [ 1709.295674] env[62405]: _type = "Task" [ 1709.295674] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.304483] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947381, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.422793] env[62405]: DEBUG oslo_concurrency.lockutils [req-ea699f27-f75a-4ff2-9429-68558fd53770 req-2c74bfc9-ae68-491f-a343-ef53dc01dc8a service nova] Releasing lock "refresh_cache-6c6a3974-c87e-47ed-a025-d6221a8decd7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.595990] env[62405]: DEBUG oslo_concurrency.lockutils [req-0f49d25d-513f-4fc6-bc55-83c01725a2da req-746381e4-211d-480b-88fb-6e9c69b073ad service nova] Releasing lock "refresh_cache-aae3abca-951a-4149-9ccb-d70bea218aea" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.718932] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.826s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.718932] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1709.722153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 37.977s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.722366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.722547] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1709.722960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.898s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.723325] env[62405]: DEBUG nova.objects.instance [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lazy-loading 'resources' on Instance uuid fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.725946] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dae0667-a1cf-4c6a-9bba-fdc5ca0e40cf {{(pid=62405) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.736065] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe6597c-6e36-4f29-b4ae-2427fc9bdcfb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.753643] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102b92ad-10b0-445e-9ae6-4f57e2d3dd97 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.761725] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d45acb-e64a-4f29-93e8-52c002704287 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.794826] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178697MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1709.795938] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.807199] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947381, 'name': ReconfigVM_Task, 'duration_secs': 0.266295} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.807671] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Reconfigured VM instance instance-00000036 to attach disk [datastore1] aae3abca-951a-4149-9ccb-d70bea218aea/aae3abca-951a-4149-9ccb-d70bea218aea.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.808350] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbdf0733-25ae-44fb-a38e-428cc6978048 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.816565] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1709.816565] env[62405]: value = "task-1947382" [ 1709.816565] env[62405]: _type = "Task" [ 1709.816565] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.829540] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947382, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.227458] env[62405]: DEBUG nova.compute.utils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1710.233239] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1710.233533] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1710.329561] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947382, 'name': Rename_Task, 'duration_secs': 0.175514} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.329976] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1710.330427] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4427ad6-f09d-4953-b9a4-05068322e942 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.340341] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1710.340341] env[62405]: value = "task-1947383" [ 1710.340341] env[62405]: _type = "Task" [ 1710.340341] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.351089] env[62405]: DEBUG nova.policy [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d60370be4844d6198a8a0a692c5ee1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2337ac45a39041268ce9221de30e16af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1710.359444] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.738240] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1710.816033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f52bd7-bbc9-4f0a-bd62-4d492c5807d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.825924] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94972216-f311-4c2f-8d11-bc7bc7064314 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.863752] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Successfully created port: 181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1710.869224] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234534ec-6ba9-4973-9dc0-9bc6af3b8f31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.882521] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947383, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.886514] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ee75d0-8aa4-4f0a-8d5d-463883ccdb44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.903130] env[62405]: DEBUG nova.compute.provider_tree [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1711.374224] env[62405]: DEBUG oslo_vmware.api [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947383, 'name': PowerOnVM_Task, 'duration_secs': 0.91585} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.374568] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1711.374697] env[62405]: INFO nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Took 6.73 seconds to spawn the instance on the hypervisor. 
[ 1711.374907] env[62405]: DEBUG nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1711.375875] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61186bbb-71b3-4f56-9251-eba4f5bf750f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.407057] env[62405]: DEBUG nova.scheduler.client.report [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1711.499109] env[62405]: DEBUG nova.network.neutron [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Port 7fbae16c-e943-4752-8a7e-92bdea130e1a binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1711.499397] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.499551] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.499714] env[62405]: DEBUG nova.network.neutron [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1711.753418] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1711.781635] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1711.781948] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.782164] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1711.782386] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.782568] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1711.782758] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1711.783012] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1711.783233] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1711.783779] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 
tempest-ServersTestJSON-228240997-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1711.783957] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1711.784153] env[62405]: DEBUG nova.virt.hardware [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1711.785056] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52aeb914-d37c-491c-822f-cf0731976de9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.794392] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba62b6b-144e-4ed8-a0e3-699dd6779436 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.899667] env[62405]: INFO nova.compute.manager [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Took 45.61 seconds to build instance. [ 1711.911517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.914389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.002s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.914936] env[62405]: DEBUG nova.objects.instance [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lazy-loading 'resources' on Instance uuid 9b71f962-2b92-4f7b-bb8d-b50da5130018 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1712.402254] env[62405]: DEBUG oslo_concurrency.lockutils [None req-71ec4483-05a9-473d-953a-ec97012f914e tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.127s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.430047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3305b2f0-80e4-45d3-9655-4c486b59e110 tempest-ServerActionsV293TestJSON-142410575 
tempest-ServerActionsV293TestJSON-142410575-project-member] Lock "fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.663s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.453173] env[62405]: DEBUG nova.network.neutron [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.891175] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0031f040-6649-41ad-8706-74f550d0e9e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.900575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478d2897-d693-43f7-9809-75f5eea907f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.905260] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1712.939720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45525937-7dcb-4f12-afee-179699b35237 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.949154] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e19958-219e-4fba-aa62-5b21f77d9014 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.956009] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.967640] env[62405]: DEBUG nova.compute.provider_tree [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.969785] env[62405]: DEBUG nova.compute.manager [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62405) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1712.970000] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.461956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.472233] env[62405]: DEBUG nova.scheduler.client.report [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1713.979535] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 
tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.984584] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.856s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.984584] env[62405]: DEBUG nova.objects.instance [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lazy-loading 'resources' on Instance uuid a9f83357-4898-44ff-a6d8-ea6621453de9 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1714.000334] env[62405]: INFO nova.scheduler.client.report [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Deleted allocations for instance 9b71f962-2b92-4f7b-bb8d-b50da5130018 [ 1714.508567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-128beb1f-ca94-4320-aacd-f2b1988917b3 tempest-ServersTestFqdnHostnames-139085892 tempest-ServersTestFqdnHostnames-139085892-project-member] Lock "9b71f962-2b92-4f7b-bb8d-b50da5130018" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.969s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.527933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "aae3abca-951a-4149-9ccb-d70bea218aea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.528206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.529060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.529060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.529060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.530878] env[62405]: INFO nova.compute.manager [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Terminating instance [ 1714.985830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ef28f9-6207-4a5d-b4a1-2b670620ff67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.994396] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62cce92-37a1-4acf-9f42-f8c8fdb49ddb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.026053] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f7eeaa-b433-4b67-aeee-d201f2dc8ffb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.034339] env[62405]: DEBUG nova.compute.manager [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1715.034570] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1715.036426] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f44393e-64ea-4b89-84a1-8949a31219e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.040628] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57de6bea-d7d3-45c0-a424-08eef30e7a91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.049633] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1715.057674] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-453dba49-e67d-4a49-96fc-359451490646 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.059841] env[62405]: DEBUG nova.compute.provider_tree [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.067301] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1715.067301] env[62405]: value = "task-1947384" [ 1715.067301] env[62405]: _type = "Task" [ 1715.067301] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.078091] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.452560] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1715.453960] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6f91f6-745f-49c1-956d-5ef4bb1d480d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.461439] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1715.461626] env[62405]: ERROR oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk due to incomplete transfer. [ 1715.461861] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-abade319-44cb-44e0-ade3-64533701c86e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.471728] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d5ad54-5f89-e331-05e1-c552cd3cda73/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1715.472366] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploaded image d7233774-277c-4c93-9db4-49320793fa07 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1715.476262] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1715.476829] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f71c5ec4-d298-434d-9e45-87504a57865d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.486110] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1715.486110] env[62405]: value = "task-1947385" [ 1715.486110] env[62405]: _type = "Task" [ 1715.486110] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.495938] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.563625] env[62405]: DEBUG nova.scheduler.client.report [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1715.582172] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.585798] env[62405]: DEBUG nova.compute.manager [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Received event network-vif-plugged-181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1715.586019] env[62405]: DEBUG oslo_concurrency.lockutils [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] Acquiring lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.586506] env[62405]: DEBUG oslo_concurrency.lockutils [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.586824] env[62405]: DEBUG oslo_concurrency.lockutils [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.587055] env[62405]: DEBUG nova.compute.manager [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] No waiting events found dispatching network-vif-plugged-181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1715.587248] env[62405]: WARNING nova.compute.manager [req-b2291d43-5713-44b1-9501-4a36a627dd60 req-3d008ee5-0f13-4276-adc4-59cd7c65e7cc service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Received unexpected event network-vif-plugged-181e34ed-64d1-4e72-8ea6-a8e10f831868 for instance with vm_state building and task_state spawning. [ 1715.622083] env[62405]: DEBUG nova.compute.manager [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Received event network-vif-plugged-3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1715.622229] env[62405]: DEBUG oslo_concurrency.lockutils [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] Acquiring lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.622429] env[62405]: DEBUG oslo_concurrency.lockutils [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] Lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.622586] env[62405]: DEBUG oslo_concurrency.lockutils [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] Lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.622743] env[62405]: DEBUG nova.compute.manager [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] No waiting events found dispatching network-vif-plugged-3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1715.622898] env[62405]: WARNING nova.compute.manager [req-513549df-5d00-4eb1-83d3-4b459587b5e8 req-70228064-ee36-46d0-ad33-dae1bd40fb9a service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Received unexpected event network-vif-plugged-3cb3354b-4416-4325-9602-8abc5afe9861 for instance with vm_state building and task_state spawning. 
[ 1715.729135] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Successfully updated port: 181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1715.826658] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Successfully updated port: 3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1715.998446] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.081471] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.094s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.081471] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.081471] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.554s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.082506] env[62405]: INFO nova.compute.claims [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.112631] env[62405]: INFO nova.scheduler.client.report [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleted allocations for instance a9f83357-4898-44ff-a6d8-ea6621453de9 [ 1716.233428] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.233615] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquired lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.233786] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1716.331733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.331926] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.332079] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1716.497462] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.579348] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.620512] env[62405]: DEBUG oslo_concurrency.lockutils [None req-39a76771-768e-4c4e-92eb-e674f5cca5fb tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "a9f83357-4898-44ff-a6d8-ea6621453de9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.748s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.770026] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1716.922184] env[62405]: DEBUG nova.network.neutron [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updating instance_info_cache with network_info: [{"id": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "address": "fa:16:3e:10:6a:cc", "network": {"id": "c58a90d3-fe0c-4747-a255-d28e444a8fc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1500033972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2337ac45a39041268ce9221de30e16af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181e34ed-64", "ovs_interfaceid": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.998949] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.082862] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.121165] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1717.406714] env[62405]: DEBUG nova.network.neutron [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updating instance_info_cache with network_info: [{"id": "3cb3354b-4416-4325-9602-8abc5afe9861", "address": "fa:16:3e:55:48:52", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb3354b-44", "ovs_interfaceid": "3cb3354b-4416-4325-9602-8abc5afe9861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.427020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Releasing lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.427020] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Instance network_info: |[{"id": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "address": "fa:16:3e:10:6a:cc", "network": {"id": "c58a90d3-fe0c-4747-a255-d28e444a8fc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1500033972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2337ac45a39041268ce9221de30e16af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181e34ed-64", "ovs_interfaceid": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1717.427020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:6a:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '181e34ed-64d1-4e72-8ea6-a8e10f831868', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1717.433426] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Creating folder: Project (2337ac45a39041268ce9221de30e16af). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1717.438987] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d860a76-ce41-416b-abec-3f4feb4852b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.453254] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Created folder: Project (2337ac45a39041268ce9221de30e16af) in parent group-v401284. [ 1717.453421] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Creating folder: Instances. Parent ref: group-v401456. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1717.453679] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ac2d90f-5f96-4a55-86b9-df0541c0525f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.468252] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Created folder: Instances in parent group-v401456. [ 1717.468596] env[62405]: DEBUG oslo.service.loopingcall [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.468836] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1717.469073] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f18c3d22-75b1-4a29-a00e-7a7794895c2b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.494814] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1717.494814] env[62405]: value = "task-1947388" [ 1717.494814] env[62405]: _type = "Task" [ 1717.494814] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.508831] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947388, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.514320] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.584073] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947384, 'name': PowerOffVM_Task, 'duration_secs': 2.28914} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.590438] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1717.590438] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1717.590438] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0786b508-a250-4a95-9e4e-81d6e2dd3a73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.628549] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f0dcd9-dcd2-46a5-98d9-a6fc331e8827 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.635799] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d63ae7f-4c85-48d4-8c37-fb925f289262 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.666846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb3d618-5afc-4c85-a7cc-3e8bb0199822 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.675287] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4433746b-afd7-4532-be04-77f17848f5c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.694656] env[62405]: DEBUG nova.compute.provider_tree [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.891854] env[62405]: DEBUG nova.compute.manager [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Received event network-changed-181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1717.891854] env[62405]: DEBUG nova.compute.manager [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Refreshing instance network info cache due to event network-changed-181e34ed-64d1-4e72-8ea6-a8e10f831868. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1717.891854] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] Acquiring lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.892186] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] Acquired lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.892547] env[62405]: DEBUG nova.network.neutron [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Refreshing network info cache for port 181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.909817] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.909988] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Instance network_info: |[{"id": "3cb3354b-4416-4325-9602-8abc5afe9861", "address": "fa:16:3e:55:48:52", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb3354b-44", "ovs_interfaceid": "3cb3354b-4416-4325-9602-8abc5afe9861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1717.911650] env[62405]: DEBUG nova.compute.manager [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Received event network-changed-3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1717.911692] env[62405]: DEBUG nova.compute.manager [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] [instance: 
2c623c00-92f2-4cc4-8503-963c3308d708] Refreshing instance network info cache due to event network-changed-3cb3354b-4416-4325-9602-8abc5afe9861. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1717.911945] env[62405]: DEBUG oslo_concurrency.lockutils [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] Acquiring lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.912290] env[62405]: DEBUG oslo_concurrency.lockutils [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] Acquired lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.912434] env[62405]: DEBUG nova.network.neutron [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Refreshing network info cache for port 3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.914044] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:48:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cb3354b-4416-4325-9602-8abc5afe9861', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1717.922313] env[62405]: DEBUG oslo.service.loopingcall [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1717.925777] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1717.926499] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1045be44-362f-4325-a6f7-858780423d40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.948693] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1717.948693] env[62405]: value = "task-1947390" [ 1717.948693] env[62405]: _type = "Task" [ 1717.948693] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.959029] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947390, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.001732] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947385, 'name': Destroy_Task, 'duration_secs': 2.148759} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.004823] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroyed the VM [ 1718.005093] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1718.005418] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3efedf92-5d0c-401e-92a8-2a5efd0f03d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.010961] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947388, 'name': CreateVM_Task, 'duration_secs': 0.472733} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.011487] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1718.012421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.012421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.013060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.013060] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e04f36ec-ed32-4332-93f7-72481ee6938f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.018269] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting 
for the task: (returnval){ [ 1718.018269] env[62405]: value = "task-1947391" [ 1718.018269] env[62405]: _type = "Task" [ 1718.018269] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.020156] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1718.020156] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c6e641-0c99-2f94-e3e1-821856f47c2d" [ 1718.020156] env[62405]: _type = "Task" [ 1718.020156] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.032936] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947391, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.038448] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c6e641-0c99-2f94-e3e1-821856f47c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.011821} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.039057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.039057] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.039717] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.039717] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.039717] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1718.039971] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b737b9a-c743-487f-88b5-535d43f49063 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.050511] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1718.053709] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1718.053709] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fc88f5e-9e3b-40f0-8cfc-5288aa073658 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.057553] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1718.057553] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52482557-7efa-bec1-f9a4-93fbee17a49e" [ 1718.057553] env[62405]: _type = "Task" [ 1718.057553] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.066232] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52482557-7efa-bec1-f9a4-93fbee17a49e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.118204] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1718.118425] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1718.118600] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleting the datastore file [datastore1] aae3abca-951a-4149-9ccb-d70bea218aea {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1718.118853] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ba54d71-9d04-4b15-8772-8a7c980af436 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.126087] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1718.126087] env[62405]: value = "task-1947392" [ 1718.126087] env[62405]: _type = "Task" [ 1718.126087] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.136168] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.198698] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.198698] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.198698] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.198698] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.198698] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.200354] env[62405]: DEBUG nova.scheduler.client.report [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1718.204630] env[62405]: INFO nova.compute.manager [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Terminating instance [ 1718.251578] env[62405]: DEBUG 
nova.network.neutron [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updated VIF entry in instance network info cache for port 3cb3354b-4416-4325-9602-8abc5afe9861. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1718.251952] env[62405]: DEBUG nova.network.neutron [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updating instance_info_cache with network_info: [{"id": "3cb3354b-4416-4325-9602-8abc5afe9861", "address": "fa:16:3e:55:48:52", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb3354b-44", "ovs_interfaceid": "3cb3354b-4416-4325-9602-8abc5afe9861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.460866] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947390, 'name': CreateVM_Task, 'duration_secs': 0.371964} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.461241] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1718.462892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.463313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.463501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.463803] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-632ed3c7-68c3-47a8-9835-d43a2d0ed074 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.470621] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1718.470621] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528481f5-bc0c-cf79-736d-01d22c75c880" [ 1718.470621] env[62405]: _type = "Task" [ 1718.470621] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.480708] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528481f5-bc0c-cf79-736d-01d22c75c880, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.529929] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947391, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.570750] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52482557-7efa-bec1-f9a4-93fbee17a49e, 'name': SearchDatastore_Task, 'duration_secs': 0.010366} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.570750] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6c5682f-0578-4a79-9c99-a21fa2d12912 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.576476] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1718.576476] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52543892-59f4-e6e4-8a72-75ce1d6b0b73" [ 1718.576476] env[62405]: _type = "Task" [ 1718.576476] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.585479] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52543892-59f4-e6e4-8a72-75ce1d6b0b73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.638122] env[62405]: DEBUG oslo_vmware.api [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151012} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.640703] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1718.640911] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1718.641106] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1718.641315] env[62405]: INFO nova.compute.manager [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Took 3.61 seconds to destroy the instance on the hypervisor. [ 1718.641505] env[62405]: DEBUG oslo.service.loopingcall [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1718.641741] env[62405]: DEBUG nova.compute.manager [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1718.641838] env[62405]: DEBUG nova.network.neutron [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1718.710975] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.711549] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1718.718024] env[62405]: DEBUG nova.compute.manager [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1718.718024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1718.718024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.172s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.718024] env[62405]: INFO nova.compute.claims [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1718.721074] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c94164f-7a09-454f-b79c-e0f4398c795c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.728534] env[62405]: DEBUG nova.network.neutron [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updated VIF entry in instance network info cache for port 181e34ed-64d1-4e72-8ea6-a8e10f831868. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1718.729149] env[62405]: DEBUG nova.network.neutron [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updating instance_info_cache with network_info: [{"id": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "address": "fa:16:3e:10:6a:cc", "network": {"id": "c58a90d3-fe0c-4747-a255-d28e444a8fc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1500033972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2337ac45a39041268ce9221de30e16af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181e34ed-64", "ovs_interfaceid": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.732227] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1718.732571] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15aea096-2f5d-4d16-becf-d4df78b82a91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.741118] env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1718.741118] env[62405]: value = "task-1947393" [ 1718.741118] env[62405]: _type = "Task" [ 1718.741118] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.753035] env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947393, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.754344] env[62405]: DEBUG oslo_concurrency.lockutils [req-a33f2a77-623a-43ab-96ea-ee2267f6c8bb req-f6d245a7-6822-4e59-ae0b-ebaace928d67 service nova] Releasing lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.982589] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528481f5-bc0c-cf79-736d-01d22c75c880, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.982897] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.983151] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.983387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.031053] env[62405]: DEBUG oslo_vmware.api [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947391, 'name': RemoveSnapshot_Task, 'duration_secs': 0.689626} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.031380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1719.031380] env[62405]: INFO nova.compute.manager [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 15.85 seconds to snapshot the instance on the hypervisor. 
[ 1719.089161] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52543892-59f4-e6e4-8a72-75ce1d6b0b73, 'name': SearchDatastore_Task, 'duration_secs': 0.00996} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.089461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.089741] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 4c8c0d2f-d8d3-4422-8a5c-8999636b22be/4c8c0d2f-d8d3-4422-8a5c-8999636b22be.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1719.090349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.090602] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.090836] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e22f062-7330-46c2-bc01-8a40af4fd9f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.092897] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1fa0491-3931-4cb4-adef-9e8a8835844c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.102123] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1719.102123] env[62405]: value = "task-1947394" [ 1719.102123] env[62405]: _type = "Task" [ 1719.102123] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.103298] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.103551] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.107128] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72f91304-0095-4427-b720-14d31fcdc48c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.112946] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1719.112946] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522fa2f2-3e2e-0935-84f2-9b9055d61be6" [ 1719.112946] env[62405]: _type = "Task" [ 1719.112946] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.117111] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.125339] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522fa2f2-3e2e-0935-84f2-9b9055d61be6, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.126097] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc2bc57-d48c-40c3-a66f-315b7cfb8c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.131196] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1719.131196] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fef60d-bc8e-2bb7-4c21-f28bf213269c" [ 1719.131196] env[62405]: _type = "Task" [ 1719.131196] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.138722] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fef60d-bc8e-2bb7-4c21-f28bf213269c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.216648] env[62405]: DEBUG nova.compute.utils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1719.218074] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1719.218254] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1719.233806] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d0e0ee3-b301-4a6e-9d3d-7b36227348ad req-285064fa-679a-41ee-b7c8-70b246790b0a service nova] Releasing lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.252523] env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947393, 'name': PowerOffVM_Task, 'duration_secs': 0.241433} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.252797] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1719.252966] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1719.253268] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbbc5978-dbb4-413e-aabc-95a9f51f8cb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.268963] env[62405]: DEBUG nova.policy [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f465915a21943b58ddfe2d0d5816fbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '774aaaffb55b401eae1c919aa2f45675', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1719.399776] env[62405]: DEBUG nova.network.neutron [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.458822] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1719.459293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1719.459760] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleting the datastore file [datastore1] 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1719.460240] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45739691-59a1-449c-869a-3fdd2b37a47b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.470052] 
env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for the task: (returnval){ [ 1719.470052] env[62405]: value = "task-1947396" [ 1719.470052] env[62405]: _type = "Task" [ 1719.470052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.480878] env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947396, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.551560] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Successfully created port: 74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1719.588764] env[62405]: DEBUG nova.compute.manager [None req-a406d4cf-af98-42c4-bf36-7b8b7b567131 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Found 2 images (rotation: 2) {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1719.614868] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947394, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.643200] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fef60d-bc8e-2bb7-4c21-f28bf213269c, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.643501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.643764] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2c623c00-92f2-4cc4-8503-963c3308d708/2c623c00-92f2-4cc4-8503-963c3308d708.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1719.644231] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c365d0a-c6b8-407b-a63a-ed26a556582b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.653428] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1719.653428] env[62405]: value = "task-1947397" [ 1719.653428] env[62405]: _type = "Task" [ 1719.653428] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.662711] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.722186] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1719.903511] env[62405]: INFO nova.compute.manager [-] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Took 1.26 seconds to deallocate network for instance. [ 1719.984407] env[62405]: DEBUG oslo_vmware.api [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Task: {'id': task-1947396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328943} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.984737] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1719.984969] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1719.985178] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1719.985366] env[62405]: INFO nova.compute.manager [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1719.985653] env[62405]: DEBUG oslo.service.loopingcall [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.988408] env[62405]: DEBUG nova.compute.manager [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1719.988524] env[62405]: DEBUG nova.network.neutron [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1720.025070] env[62405]: DEBUG nova.compute.manager [req-69f85d9a-d12e-4c73-b8a3-2410e983d337 req-87fae175-47cb-4e48-9ed4-01f69b8d32fd service nova] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Received event network-vif-deleted-c2f0f942-0f3a-45ee-9b01-295f9c3a79cd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1720.118254] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587861} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.121879] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 4c8c0d2f-d8d3-4422-8a5c-8999636b22be/4c8c0d2f-d8d3-4422-8a5c-8999636b22be.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1720.122231] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1720.122892] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28cc54bc-42e1-4838-a9b6-8765f8645ca9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.132442] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1720.132442] env[62405]: value = "task-1947398" [ 1720.132442] env[62405]: _type = "Task" [ 1720.132442] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.146625] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.167400] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947397, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.268067] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2a853a-1400-45cc-9a15-a4e27578f211 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.278296] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b88ca42-dbe0-4533-842d-76868034c32e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.311956] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47469f8-75e7-47e1-9dc5-02df1de88f77 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.321624] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d8ff3d-8dce-40a1-ba47-32196572a945 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.336725] env[62405]: DEBUG nova.compute.provider_tree [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1720.413853] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.543075] env[62405]: DEBUG nova.compute.manager [req-ac939d58-7008-4e9e-a90d-27d1e2b27ca2 req-f21ce685-8cad-45aa-9734-ed9279f42b07 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Received event network-vif-deleted-95874447-5114-44c9-8785-0134bd6173f2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1720.543075] env[62405]: INFO nova.compute.manager [req-ac939d58-7008-4e9e-a90d-27d1e2b27ca2 req-f21ce685-8cad-45aa-9734-ed9279f42b07 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Neutron deleted interface 95874447-5114-44c9-8785-0134bd6173f2; detaching it from the instance and deleting it from the info cache [ 1720.543312] env[62405]: DEBUG nova.network.neutron [req-ac939d58-7008-4e9e-a90d-27d1e2b27ca2 req-f21ce685-8cad-45aa-9734-ed9279f42b07 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.642743] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078545} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.643032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1720.643849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af4f6dd-fc21-4369-a1b2-42953bbe7ca5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.669311] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 4c8c0d2f-d8d3-4422-8a5c-8999636b22be/4c8c0d2f-d8d3-4422-8a5c-8999636b22be.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1720.672785] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be0e6ecf-d38d-459d-bed8-478eb27381ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.695576] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537968} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.696971] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2c623c00-92f2-4cc4-8503-963c3308d708/2c623c00-92f2-4cc4-8503-963c3308d708.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1720.697279] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1720.697506] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1720.697506] env[62405]: value = "task-1947399" [ 1720.697506] env[62405]: _type = "Task" [ 1720.697506] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.697672] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bd2a3f1-69a7-4d75-ac65-ff4fbe04cbe3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.711108] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947399, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.712877] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1720.712877] env[62405]: value = "task-1947400" [ 1720.712877] env[62405]: _type = "Task" [ 1720.712877] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.723577] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947400, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.734346] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1720.759027] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1720.759314] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1720.759475] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1720.759659] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1720.759806] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1720.759957] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1720.760192] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1720.760355] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1720.760527] env[62405]: DEBUG 
nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1720.760696] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1720.760882] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1720.761722] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12496db-e2dd-445b-8efb-334b9f959f91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.770587] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f128d2b1-70ef-440e-a524-30b0d0e2465b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.817890] env[62405]: DEBUG nova.network.neutron [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.842047] env[62405]: DEBUG nova.scheduler.client.report [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1721.007397] env[62405]: DEBUG nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1721.008636] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26daf69-9d8e-4010-ba44-480be325744b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.046298] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b926eb3-4e23-4a78-a695-1142481ff71c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.059467] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0481fb8a-6c68-440c-b7c4-7d6aab336148 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.091124] env[62405]: DEBUG nova.compute.manager [req-ac939d58-7008-4e9e-a90d-27d1e2b27ca2 req-f21ce685-8cad-45aa-9734-ed9279f42b07 service nova] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Detach interface failed, port_id=95874447-5114-44c9-8785-0134bd6173f2, reason: Instance 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1721.141264] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Successfully updated port: 74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.210359] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947399, 'name': ReconfigVM_Task, 'duration_secs': 0.372433} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.210728] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 4c8c0d2f-d8d3-4422-8a5c-8999636b22be/4c8c0d2f-d8d3-4422-8a5c-8999636b22be.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1721.211555] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b31bde0c-fbb6-40b6-9d7b-d0b9299d8c9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.218976] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1721.218976] env[62405]: value = "task-1947401" [ 1721.218976] env[62405]: _type = "Task" [ 1721.218976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.222132] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947400, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075147} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.224961] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1721.225707] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08310e6-6845-4dd0-9d27-d12ad52e9f0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.232964] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947401, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.250911] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 2c623c00-92f2-4cc4-8503-963c3308d708/2c623c00-92f2-4cc4-8503-963c3308d708.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1721.251237] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b988fed-0229-46d4-ab2c-600634d65385 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.272087] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1721.272087] env[62405]: value = "task-1947402" [ 1721.272087] env[62405]: _type = "Task" [ 1721.272087] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.281134] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.321010] env[62405]: INFO nova.compute.manager [-] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Took 1.33 seconds to deallocate network for instance. 
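The disk operations traced above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same submit-then-poll pattern: the vSphere call returns a task reference, and oslo.vmware polls it until the "progress is N% ... completed successfully" record with a duration_secs field appears. A minimal sketch of that pattern, assuming an oslo.vmware-style session object exposing invoke_api() and wait_for_task(); the helper name and datastore path are illustrative, and the optional datacenter argument of ExtendVirtualDisk_Task is omitted for brevity, so this is not Nova's actual _extend_virtual_disk code:

def extend_root_disk(session, datastore_path, new_capacity_kb):
    # `session` is assumed to be an already-created oslo_vmware.api.VMwareAPISession.
    # Submit the extend task, then block until vCenter reports completion,
    # mirroring the "ExtendVirtualDisk_Task ... progress is 0% ...
    # completed successfully" records above.
    vim = session.vim
    task = session.invoke_api(
        vim,
        "ExtendVirtualDisk_Task",
        vim.service_content.virtualDiskManager,
        name=datastore_path,            # e.g. "[datastore1] <uuid>/<uuid>.vmdk"
        newCapacityKb=new_capacity_kb,  # 1048576 KB = 1 GiB root disk
        eagerZero=False,
    )
    # wait_for_task polls the task object and raises if it ends in an error state.
    return session.wait_for_task(task)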
[ 1721.348260] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.348580] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1721.351242] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.573s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.351439] env[62405]: DEBUG nova.objects.instance [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'resources' on Instance uuid 23748dfd-7c60-41db-8acb-7b49cf1c27db {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.524073] env[62405]: INFO nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] instance snapshotting [ 1721.524073] env[62405]: DEBUG nova.objects.instance [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.644011] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.644193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.644346] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.731485] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': 
task-1947401, 'name': Rename_Task, 'duration_secs': 0.144634} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.731754] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1721.731992] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec95b3d6-1b40-483a-814b-9da34b4f60ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.748463] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1721.748463] env[62405]: value = "task-1947403" [ 1721.748463] env[62405]: _type = "Task" [ 1721.748463] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.758254] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.782411] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947402, 'name': ReconfigVM_Task, 'duration_secs': 0.272705} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.782638] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 2c623c00-92f2-4cc4-8503-963c3308d708/2c623c00-92f2-4cc4-8503-963c3308d708.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1721.783237] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed2ec1fb-3651-4818-996c-c537631a611e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.793012] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1721.793012] env[62405]: value = "task-1947404" [ 1721.793012] env[62405]: _type = "Task" [ 1721.793012] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.802328] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947404, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.828860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.854438] env[62405]: DEBUG nova.compute.utils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1721.858765] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1721.858970] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1721.911346] env[62405]: DEBUG nova.policy [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f465915a21943b58ddfe2d0d5816fbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '774aaaffb55b401eae1c919aa2f45675', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1722.029854] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2649a7-5408-4803-976e-a9e68790fb21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.061638] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb72ab09-8216-45a6-9781-512a7e18f833 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.075423] env[62405]: DEBUG nova.compute.manager [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Received event network-vif-plugged-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1722.075698] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Acquiring lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.075871] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.076116] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.076313] env[62405]: DEBUG nova.compute.manager [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] No waiting events found dispatching network-vif-plugged-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1722.076532] env[62405]: WARNING nova.compute.manager [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Received unexpected event network-vif-plugged-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 for instance with vm_state building and task_state spawning. [ 1722.076706] env[62405]: DEBUG nova.compute.manager [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Received event network-changed-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1722.076863] env[62405]: DEBUG nova.compute.manager [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Refreshing instance network info cache due to event network-changed-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1722.077041] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Acquiring lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.198863] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.212954] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Successfully created port: eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1722.259884] env[62405]: DEBUG oslo_vmware.api [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947403, 'name': PowerOnVM_Task, 'duration_secs': 0.479601} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.262874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1722.263105] env[62405]: INFO nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Took 10.51 seconds to spawn the instance on the hypervisor. [ 1722.263575] env[62405]: DEBUG nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1722.264578] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e0bbaf-0f68-4f28-bdad-fc14b6b1b64f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.305567] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947404, 'name': Rename_Task, 'duration_secs': 0.168653} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.305854] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1722.306148] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f79bed1-5d8d-4ff7-b0d8-7fdaff6e266e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.320367] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1722.320367] env[62405]: value = "task-1947405" [ 1722.320367] env[62405]: _type = "Task" [ 1722.320367] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.330563] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947405, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.358854] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1722.416954] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5747d63-7a38-4ad6-9f17-6552223c9f7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.426776] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc7b13e-92a3-4c38-a907-864d32ff8446 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.432296] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Updating instance_info_cache with network_info: [{"id": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "address": "fa:16:3e:4b:fe:78", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bb12c4-3e", "ovs_interfaceid": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.467366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.468031] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] 
[instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Instance network_info: |[{"id": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "address": "fa:16:3e:4b:fe:78", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bb12c4-3e", "ovs_interfaceid": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1722.468768] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d241295-682d-4fc9-bb60-7f404808948d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.471730] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Acquired lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.471921] env[62405]: DEBUG nova.network.neutron [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Refreshing network info cache for port 74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1722.473213] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:fe:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74bb12c4-3ef6-4bc6-b5ea-810282fe3f43', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1722.480962] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating folder: Project (774aaaffb55b401eae1c919aa2f45675). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1722.481609] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7b805c9-1ffb-47a0-813b-164381db559e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.492330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a053e61-4d3b-4928-856e-19d3ae3d124a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.502108] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created folder: Project (774aaaffb55b401eae1c919aa2f45675) in parent group-v401284. [ 1722.502319] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating folder: Instances. Parent ref: group-v401460. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1722.503169] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b58ce84-1e21-4f4f-94d1-eda78476251a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.512868] env[62405]: DEBUG nova.compute.provider_tree [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.518323] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created folder: Instances in parent group-v401460. [ 1722.518323] env[62405]: DEBUG oslo.service.loopingcall [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.518323] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1722.518323] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73424ed9-96c3-4815-9163-2b9c86dbf915 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.539459] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1722.539459] env[62405]: value = "task-1947408" [ 1722.539459] env[62405]: _type = "Task" [ 1722.539459] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.547980] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947408, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.574724] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1722.576729] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-11ca78a3-1ec8-4e8c-bdee-552be6a5cb0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.582428] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1722.582428] env[62405]: value = "task-1947409" [ 1722.582428] env[62405]: _type = "Task" [ 1722.582428] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.592653] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947409, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.788830] env[62405]: INFO nova.compute.manager [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Took 53.11 seconds to build instance. [ 1722.834855] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947405, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.018873] env[62405]: DEBUG nova.scheduler.client.report [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1723.050264] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947408, 'name': CreateVM_Task, 'duration_secs': 0.379933} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.050947] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1723.051681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.052508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.052508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1723.052508] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-401be700-59f8-4665-8490-f1acc19a58ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.058417] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1723.058417] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52892e28-3fd3-6879-b8b8-bc4d73b2e045" [ 1723.058417] env[62405]: _type = "Task" [ 1723.058417] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.068390] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52892e28-3fd3-6879-b8b8-bc4d73b2e045, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.091968] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947409, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.289944] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9bace0a3-194d-44c8-9ac2-b67c892a455b tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.615s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.331008] env[62405]: DEBUG oslo_vmware.api [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947405, 'name': PowerOnVM_Task, 'duration_secs': 0.564749} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.331295] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1723.331497] env[62405]: INFO nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Took 14.40 seconds to spawn the instance on the hypervisor. [ 1723.331675] env[62405]: DEBUG nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1723.332520] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb89394-aa3c-4fcb-986a-bbb1296ea4d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.361448] env[62405]: DEBUG nova.network.neutron [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Updated VIF entry in instance network info cache for port 74bb12c4-3ef6-4bc6-b5ea-810282fe3f43. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1723.362325] env[62405]: DEBUG nova.network.neutron [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Updating instance_info_cache with network_info: [{"id": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "address": "fa:16:3e:4b:fe:78", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bb12c4-3e", "ovs_interfaceid": "74bb12c4-3ef6-4bc6-b5ea-810282fe3f43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.374766] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1723.399051] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1723.399306] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.399466] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1723.399651] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.399800] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1723.399948] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1723.400280] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1723.400456] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1723.400655] env[62405]: DEBUG 
nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1723.400836] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1723.401024] env[62405]: DEBUG nova.virt.hardware [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1723.401865] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87992a65-34e5-460a-bf90-c335776b9a7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.410909] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60edfac-bfb1-4b19-98cd-daf0acd2c9b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.530235] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.176s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.530235] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 47.447s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.530235] env[62405]: DEBUG nova.objects.instance [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1723.562301] env[62405]: INFO nova.scheduler.client.report [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted allocations for instance 23748dfd-7c60-41db-8acb-7b49cf1c27db [ 1723.576336] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52892e28-3fd3-6879-b8b8-bc4d73b2e045, 'name': SearchDatastore_Task, 'duration_secs': 0.012003} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.576895] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.577390] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1723.577783] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.578092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.580075] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1723.580075] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb6686be-123a-4189-8162-839ce87b3607 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.593024] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1723.593024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1723.593024] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-964c8030-ec8f-4d59-92d6-397cb371c0d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.597906] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947409, 'name': CreateSnapshot_Task, 'duration_secs': 0.559346} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.598657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1723.599764] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8425311-735a-4aa5-96b3-5a04dc4133d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.603395] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1723.603395] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a963c-8dab-bf1f-44c0-a87509c7402f" [ 1723.603395] env[62405]: _type = "Task" [ 1723.603395] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.620634] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a963c-8dab-bf1f-44c0-a87509c7402f, 'name': SearchDatastore_Task, 'duration_secs': 0.010623} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.622121] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594b66ef-34de-41ae-a189-6b07cdb76361 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.629097] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1723.629097] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52977f13-2a31-f321-53ed-c145a3d89e08" [ 1723.629097] env[62405]: _type = "Task" [ 1723.629097] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.639471] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52977f13-2a31-f321-53ed-c145a3d89e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.792746] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1723.850843] env[62405]: INFO nova.compute.manager [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Took 54.81 seconds to build instance. [ 1723.864483] env[62405]: DEBUG oslo_concurrency.lockutils [req-01ad7775-a2f5-4a50-beab-80214db60ec4 req-fc6df5f3-57e4-4054-be0f-9a7c2d28fe33 service nova] Releasing lock "refresh_cache-9b495caf-4394-40c0-b68f-d02c7d759a6a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.978615] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Successfully updated port: eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.073346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33f5353e-7e6d-44ea-9d5c-3648331601cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "23748dfd-7c60-41db-8acb-7b49cf1c27db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.619s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.130047] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1724.130047] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-44299917-8f2f-4f6f-9274-1c44026f5242 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.142800] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52977f13-2a31-f321-53ed-c145a3d89e08, 'name': SearchDatastore_Task, 'duration_secs': 0.010345} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.144693] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.144693] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b495caf-4394-40c0-b68f-d02c7d759a6a/9b495caf-4394-40c0-b68f-d02c7d759a6a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1724.144970] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1724.144970] env[62405]: value = "task-1947410" [ 1724.144970] env[62405]: _type = "Task" [ 1724.144970] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.145217] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa77c12f-7ceb-4198-ae92-4b64c92d826f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.157618] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947410, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.159042] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1724.159042] env[62405]: value = "task-1947411" [ 1724.159042] env[62405]: _type = "Task" [ 1724.159042] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.167583] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947411, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.222331] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Received event network-vif-plugged-eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1724.222701] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Acquiring lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.222992] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.223237] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.223591] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] No waiting events found dispatching network-vif-plugged-eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1724.223687] env[62405]: WARNING nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Received unexpected event network-vif-plugged-eba12fff-0f73-414b-bcdd-e9abed9edc58 for instance with vm_state building and task_state spawning. [ 1724.224047] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Received event network-changed-eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1724.224102] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Refreshing instance network info cache due to event network-changed-eba12fff-0f73-414b-bcdd-e9abed9edc58. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1724.224292] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Acquiring lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.224426] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Acquired lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.224585] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Refreshing network info cache for port eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1724.322309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.353272] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ffd96f4e-6412-4601-b309-798a439a57ab tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.325s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.484194] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.541759] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85c5669c-3ad3-487a-b7d6-d43292c71dd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.542329] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.875s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.543983] env[62405]: INFO nova.compute.claims [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1724.671404] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947410, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.677926] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947411, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.793292] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1724.856304] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1725.142565] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.159624] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947410, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.169415] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947411, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527448} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.169927] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b495caf-4394-40c0-b68f-d02c7d759a6a/9b495caf-4394-40c0-b68f-d02c7d759a6a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1725.170278] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1725.172590] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3afc5a3-6c11-4392-9915-5bd08b195548 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.191633] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1725.191633] env[62405]: value = "task-1947412" [ 1725.191633] env[62405]: _type = "Task" [ 1725.191633] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.204478] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947412, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.394754] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.647776] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Releasing lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.648238] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Received event network-changed-181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1725.648512] env[62405]: DEBUG nova.compute.manager [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Refreshing instance network info cache due to event network-changed-181e34ed-64d1-4e72-8ea6-a8e10f831868. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1725.648800] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Acquiring lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.649017] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Acquired lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.649535] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Refreshing network info cache for port 181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1725.650894] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.651124] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.664799] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947410, 'name': CloneVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.705815] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947412, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078725} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.705815] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1725.706231] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77aa238a-f06d-4f11-8a33-cf85f288b460 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.729129] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 9b495caf-4394-40c0-b68f-d02c7d759a6a/9b495caf-4394-40c0-b68f-d02c7d759a6a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1725.729436] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70e777c4-fbd6-4e45-9340-222a69f0223e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.753569] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1725.753569] env[62405]: value = "task-1947413" [ 1725.753569] env[62405]: _type = "Task" [ 1725.753569] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.764473] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947413, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.078330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63c2aaa-280f-46cb-9123-ea2a40dae73b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.086507] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8d5129-d352-4123-96a3-2de4633b03f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.121865] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e4e557-c39b-4ea0-b896-f822a26f1a05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.131792] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3409fd1-c14d-4d04-8e1a-42968c46980d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.146025] env[62405]: DEBUG nova.compute.provider_tree [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.167665] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947410, 'name': CloneVM_Task, 'duration_secs': 1.633847} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.167952] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Created linked-clone VM from snapshot [ 1726.168697] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc98477d-e2d8-4e9b-8eaa-89be6837d964 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.190288] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploading image e3b71764-21b0-4151-a85f-eb549854f430 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1726.210508] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1726.225705] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1726.225705] env[62405]: value = "vm-401464" [ 1726.225705] env[62405]: _type = "VirtualMachine" [ 1726.225705] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1726.226404] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d3d785ff-020f-47e3-ae10-7596628df8c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.237165] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease: (returnval){ [ 1726.237165] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d167a4-9eef-d186-e7b5-962d423395b8" [ 1726.237165] env[62405]: _type = "HttpNfcLease" [ 1726.237165] env[62405]: } obtained for exporting VM: (result){ [ 1726.237165] env[62405]: value = "vm-401464" [ 1726.237165] env[62405]: _type = "VirtualMachine" [ 1726.237165] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1726.237594] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the lease: (returnval){ [ 1726.237594] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d167a4-9eef-d186-e7b5-962d423395b8" [ 1726.237594] env[62405]: _type = "HttpNfcLease" [ 1726.237594] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1726.245905] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1726.245905] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d167a4-9eef-d186-e7b5-962d423395b8" [ 1726.245905] env[62405]: _type = "HttpNfcLease" [ 1726.245905] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1726.265103] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947413, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.298530] env[62405]: DEBUG nova.compute.manager [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Received event network-changed-3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1726.298788] env[62405]: DEBUG nova.compute.manager [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Refreshing instance network info cache due to event network-changed-3cb3354b-4416-4325-9602-8abc5afe9861. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1726.299152] env[62405]: DEBUG oslo_concurrency.lockutils [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] Acquiring lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.299228] env[62405]: DEBUG oslo_concurrency.lockutils [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] Acquired lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.299328] env[62405]: DEBUG nova.network.neutron [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Refreshing network info cache for port 3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1726.415245] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updated VIF entry in instance network info cache for port 181e34ed-64d1-4e72-8ea6-a8e10f831868. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1726.415651] env[62405]: DEBUG nova.network.neutron [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updating instance_info_cache with network_info: [{"id": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "address": "fa:16:3e:10:6a:cc", "network": {"id": "c58a90d3-fe0c-4747-a255-d28e444a8fc0", "bridge": "br-int", "label": "tempest-ServersTestJSON-1500033972-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2337ac45a39041268ce9221de30e16af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap181e34ed-64", "ovs_interfaceid": "181e34ed-64d1-4e72-8ea6-a8e10f831868", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.591364] env[62405]: DEBUG nova.network.neutron [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Updating instance_info_cache with network_info: [{"id": "eba12fff-0f73-414b-bcdd-e9abed9edc58", "address": "fa:16:3e:61:20:fd", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": 
"tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeba12fff-0f", "ovs_interfaceid": "eba12fff-0f73-414b-bcdd-e9abed9edc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.650632] env[62405]: DEBUG nova.scheduler.client.report [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1726.750277] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1726.750277] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d167a4-9eef-d186-e7b5-962d423395b8" [ 1726.750277] env[62405]: _type = "HttpNfcLease" [ 1726.750277] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1726.750589] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1726.750589] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d167a4-9eef-d186-e7b5-962d423395b8" [ 1726.750589] env[62405]: _type = "HttpNfcLease" [ 1726.750589] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1726.751357] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdf0e97-c3ff-45d5-8149-3a8950c0ac80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.760066] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk from lease info. 
{{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1726.760270] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1726.768140] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947413, 'name': ReconfigVM_Task, 'duration_secs': 0.513103} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.821701] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 9b495caf-4394-40c0-b68f-d02c7d759a6a/9b495caf-4394-40c0-b68f-d02c7d759a6a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1726.826958] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c23ca752-9b75-409d-8b07-fda4e0fe856a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.835621] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1726.835621] env[62405]: value = "task-1947415" [ 1726.835621] env[62405]: _type = "Task" [ 1726.835621] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.845529] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947415, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.896967] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a870000d-2b6d-40df-bb95-b162122c5089 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.921777] env[62405]: DEBUG oslo_concurrency.lockutils [req-4b522e32-cb46-4d04-9aca-d13c8c3a6337 req-6508b14b-40ff-4a3d-89bb-b838d2591b04 service nova] Releasing lock "refresh_cache-4c8c0d2f-d8d3-4422-8a5c-8999636b22be" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.093985] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "refresh_cache-dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.094338] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance network_info: |[{"id": "eba12fff-0f73-414b-bcdd-e9abed9edc58", "address": "fa:16:3e:61:20:fd", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeba12fff-0f", "ovs_interfaceid": "eba12fff-0f73-414b-bcdd-e9abed9edc58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1727.094750] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:20:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eba12fff-0f73-414b-bcdd-e9abed9edc58', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1727.102226] env[62405]: DEBUG oslo.service.loopingcall [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.102444] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1727.102673] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6ab00e8-d505-4bda-8f44-8e76256a552a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.126435] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1727.126435] env[62405]: value = "task-1947416" [ 1727.126435] env[62405]: _type = "Task" [ 1727.126435] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.135318] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947416, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.156684] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.157341] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1727.159988] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.265s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.160240] env[62405]: DEBUG nova.objects.instance [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lazy-loading 'resources' on Instance uuid 59957a81-5297-43d3-a673-024a53a19116 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1727.296976] env[62405]: DEBUG nova.network.neutron [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updated VIF entry in instance network info cache for port 3cb3354b-4416-4325-9602-8abc5afe9861. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1727.297502] env[62405]: DEBUG nova.network.neutron [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updating instance_info_cache with network_info: [{"id": "3cb3354b-4416-4325-9602-8abc5afe9861", "address": "fa:16:3e:55:48:52", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb3354b-44", "ovs_interfaceid": "3cb3354b-4416-4325-9602-8abc5afe9861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.347966] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947415, 'name': Rename_Task, 'duration_secs': 0.154033} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.348606] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1727.349776] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc7b8f6c-00d8-4ea6-9855-91983a8b39a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.360933] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1727.360933] env[62405]: value = "task-1947417" [ 1727.360933] env[62405]: _type = "Task" [ 1727.360933] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.371798] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947417, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.642733] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947416, 'name': CreateVM_Task, 'duration_secs': 0.447595} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.642733] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1727.644346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.644346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.644972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1727.644972] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1f7400c-77b1-448e-a7b1-249934a38334 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.650532] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1727.650532] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd83ed-b65b-8855-f7fc-9b00367ca0b9" [ 1727.650532] env[62405]: _type = "Task" [ 1727.650532] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.661739] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd83ed-b65b-8855-f7fc-9b00367ca0b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.663139] env[62405]: DEBUG nova.compute.utils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1727.667583] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1727.667906] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1727.752619] env[62405]: DEBUG nova.policy [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6472af0b6f6240f297f7f137cde41929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb1da47e8b1a400fab7817d9e6b282ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1727.799936] env[62405]: DEBUG oslo_concurrency.lockutils [req-c99feaff-75e8-4613-950b-9bb9d9343869 req-c3d05137-0532-4b85-936e-face949eb8f4 service nova] Releasing lock "refresh_cache-2c623c00-92f2-4cc4-8503-963c3308d708" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.873506] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947417, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.988308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.991431] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.163136] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bd83ed-b65b-8855-f7fc-9b00367ca0b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01139} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.165418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.165894] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1728.166397] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.166680] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.166931] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1728.167529] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-843e61c3-6e05-4501-becb-e90a7eadef68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.172096] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1728.183558] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1728.183878] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1728.184634] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70d487a2-62b4-4ff7-927b-0cd9bf41bd37 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.195031] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1728.195031] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522e862f-f5f4-2bba-b4dc-d7d5148e0acc" [ 1728.195031] env[62405]: _type = "Task" [ 1728.195031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.204734] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522e862f-f5f4-2bba-b4dc-d7d5148e0acc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.230830] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Successfully created port: 512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1728.250885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d035e8-aaf9-4615-b32b-b88fd79047aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.259653] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1d2455-ac87-4f8d-9984-c152958150b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.292408] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6385f74-29ea-45d5-86a3-2e4eee7d67f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.301576] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cbbf95-ee18-4c9b-8fa0-0de4b7479a47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.318158] env[62405]: DEBUG nova.compute.provider_tree [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.371529] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947417, 'name': PowerOnVM_Task, 'duration_secs': 0.51848} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.371951] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1728.372326] env[62405]: INFO nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Took 7.64 seconds to spawn the instance on the hypervisor. 
[ 1728.372635] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1728.373430] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4a85b8-6b0a-4685-bdcf-b99669a0bb98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.706273] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522e862f-f5f4-2bba-b4dc-d7d5148e0acc, 'name': SearchDatastore_Task, 'duration_secs': 0.018526} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.708013] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fbed256-057e-4bbd-b524-0cb0601e40ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.714483] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1728.714483] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f7ce2-6155-53b2-19ef-fa8b02779bf9" [ 1728.714483] env[62405]: _type = "Task" [ 1728.714483] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.723187] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f7ce2-6155-53b2-19ef-fa8b02779bf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.821130] env[62405]: DEBUG nova.scheduler.client.report [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1728.893134] env[62405]: INFO nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Took 54.40 seconds to build instance. 
[ 1729.180688] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1729.207670] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1729.207936] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.208107] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1729.208293] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.208442] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1729.208592] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1729.208787] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1729.208950] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1729.209131] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1729.209296] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1729.209468] env[62405]: DEBUG nova.virt.hardware [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1729.210355] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9c5d71-d507-42e0-9b63-4dfa57eb79e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.222007] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd8bec3-3ab6-4e49-aa21-076a9f1c55d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.229236] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523f7ce2-6155-53b2-19ef-fa8b02779bf9, 'name': SearchDatastore_Task, 'duration_secs': 0.020203} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.229804] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.230121] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] dbb5dda5-5420-4d7b-8b32-152d51cb2fb9/dbb5dda5-5420-4d7b-8b32-152d51cb2fb9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1729.230360] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0d34d4a-a2bf-4449-893c-32d333de461d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.245472] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1729.245472] env[62405]: value = "task-1947418" [ 1729.245472] env[62405]: _type = "Task" [ 1729.245472] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.253754] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947418, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.326185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.329196] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.295s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.330579] env[62405]: INFO nova.compute.claims [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1729.348887] env[62405]: INFO nova.scheduler.client.report [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted allocations for instance 59957a81-5297-43d3-a673-024a53a19116 [ 1729.393884] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.910s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.758822] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947418, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.761107] env[62405]: DEBUG nova.compute.manager [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Received event network-vif-plugged-512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1729.761366] env[62405]: DEBUG oslo_concurrency.lockutils [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] Acquiring lock "79548471-56f8-410c-a664-d2242541cd2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.761586] env[62405]: DEBUG oslo_concurrency.lockutils [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] Lock "79548471-56f8-410c-a664-d2242541cd2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.763359] env[62405]: DEBUG oslo_concurrency.lockutils [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] Lock "79548471-56f8-410c-a664-d2242541cd2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.763359] env[62405]: DEBUG nova.compute.manager [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] No waiting events found dispatching network-vif-plugged-512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1729.763359] env[62405]: WARNING nova.compute.manager [req-aed01bf6-90f2-4c8f-b5f7-b5689be1ae5f req-5f7c8049-310e-4853-ac81-52b815510150 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Received unexpected event network-vif-plugged-512621ba-6031-4414-bcd1-627311dbd9a0 for instance with vm_state building and task_state spawning. [ 1729.857866] env[62405]: DEBUG oslo_concurrency.lockutils [None req-04461eed-6f7d-4d3b-a315-31ecc81db4df tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59957a81-5297-43d3-a673-024a53a19116" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.235s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.896771] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1729.955756] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Successfully updated port: 512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1730.259554] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947418, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.8532} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.259685] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] dbb5dda5-5420-4d7b-8b32-152d51cb2fb9/dbb5dda5-5420-4d7b-8b32-152d51cb2fb9.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1730.259920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1730.260256] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e77853b-69d7-4803-9194-828f549ed107 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.269314] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1730.269314] env[62405]: value = "task-1947419" [ 1730.269314] env[62405]: _type = "Task" [ 1730.269314] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.282012] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947419, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.414778] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.460317] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.460465] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.460769] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1730.749084] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0383a657-cc67-4164-bc2c-116d88a70237 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.757745] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9d28fd-4c3f-4aa0-b979-738429671856 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.791086] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87615676-063d-4b97-aa58-35bfc27ad891 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.801406] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b5c0cf-86cd-48c6-9743-e884aea4f3f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.805215] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069481} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.805490] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1730.806564] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e08ddf-207c-4b5f-87f3-4edc52be8368 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.817766] env[62405]: DEBUG nova.compute.provider_tree [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.838610] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] dbb5dda5-5420-4d7b-8b32-152d51cb2fb9/dbb5dda5-5420-4d7b-8b32-152d51cb2fb9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1730.839518] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f4fa9bb-c644-433e-8e3f-0fdf1aef5475 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.860535] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1730.860535] env[62405]: value = "task-1947420" [ 1730.860535] env[62405]: _type = "Task" [ 1730.860535] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.870628] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947420, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.011201] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1731.279845] env[62405]: DEBUG nova.network.neutron [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Updating instance_info_cache with network_info: [{"id": "512621ba-6031-4414-bcd1-627311dbd9a0", "address": "fa:16:3e:9c:fa:39", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap512621ba-60", "ovs_interfaceid": "512621ba-6031-4414-bcd1-627311dbd9a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.321075] env[62405]: DEBUG nova.scheduler.client.report [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1731.372370] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947420, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.783210] env[62405]: DEBUG nova.compute.manager [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Received event network-changed-512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1731.783441] env[62405]: DEBUG nova.compute.manager [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Refreshing instance network info cache due to event network-changed-512621ba-6031-4414-bcd1-627311dbd9a0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1731.783604] env[62405]: DEBUG oslo_concurrency.lockutils [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] Acquiring lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.783955] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.784258] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Instance network_info: |[{"id": "512621ba-6031-4414-bcd1-627311dbd9a0", "address": "fa:16:3e:9c:fa:39", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap512621ba-60", "ovs_interfaceid": "512621ba-6031-4414-bcd1-627311dbd9a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1731.784809] env[62405]: DEBUG oslo_concurrency.lockutils [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] Acquired lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.784987] env[62405]: DEBUG nova.network.neutron [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Refreshing network info cache for port 512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1731.786137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:fa:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '512621ba-6031-4414-bcd1-627311dbd9a0', 'vif_model': 'vmxnet3'}] 
{{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1731.793555] env[62405]: DEBUG oslo.service.loopingcall [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1731.794567] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1731.794792] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dfc6e24-abd6-43ef-a3bf-b8f52dcb1d35 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.815117] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1731.815117] env[62405]: value = "task-1947421" [ 1731.815117] env[62405]: _type = "Task" [ 1731.815117] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.823685] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947421, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.826717] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.827227] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1731.829820] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.748s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.830074] env[62405]: DEBUG nova.objects.instance [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'resources' on Instance uuid a6a0e918-425d-44de-a22b-8779e9108533 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1731.873058] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947420, 'name': ReconfigVM_Task, 'duration_secs': 0.698017} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.873058] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Reconfigured VM instance instance-0000003a to attach disk [datastore1] dbb5dda5-5420-4d7b-8b32-152d51cb2fb9/dbb5dda5-5420-4d7b-8b32-152d51cb2fb9.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.873058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c73e1643-11b1-4956-b6bd-28ef151db9bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.880030] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1731.880030] env[62405]: value = "task-1947422" [ 1731.880030] env[62405]: _type = "Task" [ 1731.880030] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.890838] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947422, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.325936] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947421, 'name': CreateVM_Task, 'duration_secs': 0.475829} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.326090] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1732.326748] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.326919] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.327269] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1732.327548] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df26264-8d0a-46c7-b6d5-9ff38391f620 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.332622] env[62405]: DEBUG nova.compute.utils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1732.336900] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1732.337082] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1732.350893] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1732.350893] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b981bd-adc2-138a-4648-2bb17a61fe8b" [ 1732.350893] env[62405]: _type = "Task" [ 1732.350893] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.367031] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b981bd-adc2-138a-4648-2bb17a61fe8b, 'name': SearchDatastore_Task, 'duration_secs': 0.015243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.369843] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.370093] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1732.370326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.370468] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.370655] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1732.371112] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26b04072-f31f-4b68-a5bb-a4d88ce40946 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.392965] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1732.393410] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1732.398655] env[62405]: DEBUG nova.policy [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ca334510b4445a23dc2fb38215590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a1645e38674042828c78155974f95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1732.400705] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1b4d2f-0aa9-4aad-8648-89b33a6c9da1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.412432] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947422, 'name': Rename_Task, 'duration_secs': 0.221098} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.412432] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1732.412432] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1732.412432] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526903e8-b3aa-66d6-2dd4-0256985cec87" [ 1732.412432] env[62405]: _type = "Task" [ 1732.412432] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.412432] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5971def7-e154-472e-9a0e-5fa9e273cf1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.426190] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526903e8-b3aa-66d6-2dd4-0256985cec87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.427754] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1732.427754] env[62405]: value = "task-1947423" [ 1732.427754] env[62405]: _type = "Task" [ 1732.427754] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.447856] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947423, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.724993] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Successfully created port: 64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1732.822137] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7821db-4e65-4c28-85f2-514de52f11ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.831358] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6968cc-4075-4de1-83ff-7e0a33b815eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.837010] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1732.868358] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecc27b1-bbe2-460f-a1b8-f3895e19f5d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.877704] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682c2aba-275d-4a9e-b181-ab296956319d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.897331] env[62405]: DEBUG nova.compute.provider_tree [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.681993] env[62405]: DEBUG nova.scheduler.client.report [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1733.688025] env[62405]: DEBUG oslo_vmware.api [None 
req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526903e8-b3aa-66d6-2dd4-0256985cec87, 'name': SearchDatastore_Task, 'duration_secs': 0.020234} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.688025] env[62405]: DEBUG nova.network.neutron [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Updated VIF entry in instance network info cache for port 512621ba-6031-4414-bcd1-627311dbd9a0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.688025] env[62405]: DEBUG nova.network.neutron [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Updating instance_info_cache with network_info: [{"id": "512621ba-6031-4414-bcd1-627311dbd9a0", "address": "fa:16:3e:9c:fa:39", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap512621ba-60", "ovs_interfaceid": "512621ba-6031-4414-bcd1-627311dbd9a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.691102] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f17c02a5-3240-48be-8754-15a84a2bdeb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.700731] env[62405]: DEBUG oslo_vmware.api [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947423, 'name': PowerOnVM_Task, 'duration_secs': 0.813924} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.702888] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1733.703108] env[62405]: INFO nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Took 10.33 seconds to spawn the instance on the hypervisor. [ 1733.703290] env[62405]: DEBUG nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1733.703897] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1733.703897] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5233dbb2-dcfe-fd9f-646b-ed7eec281e1a" [ 1733.703897] env[62405]: _type = "Task" [ 1733.703897] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.704597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7db1a39-9e7a-4fe1-8d9a-89da5acb27e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.716067] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5233dbb2-dcfe-fd9f-646b-ed7eec281e1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.150368] env[62405]: DEBUG nova.compute.manager [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Received event network-vif-plugged-64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1734.150368] env[62405]: DEBUG oslo_concurrency.lockutils [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.150368] env[62405]: DEBUG oslo_concurrency.lockutils [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.150368] env[62405]: DEBUG oslo_concurrency.lockutils [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.151305] env[62405]: DEBUG nova.compute.manager [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] No waiting events found dispatching network-vif-plugged-64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1734.151700] env[62405]: WARNING nova.compute.manager [req-9cb6e1b9-69b9-4a36-a559-a5c5675b377f req-dbbaaa61-f258-4aaf-b6a0-dd3f9477694d service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Received unexpected event network-vif-plugged-64634a81-f1e1-4078-894a-2f4e8b56de13 for instance with vm_state building and task_state spawning. [ 1734.189034] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.358s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.191487] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1734.195575] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.450s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.196488] env[62405]: INFO nova.compute.claims [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1734.199274] env[62405]: DEBUG oslo_concurrency.lockutils [req-19e5ccce-257f-4c00-ad4d-efbfdc228451 req-47cc51b6-ada3-4bb7-9778-8a78290195e8 service nova] Releasing lock "refresh_cache-79548471-56f8-410c-a664-d2242541cd2a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.220552] env[62405]: INFO nova.scheduler.client.report [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocations for instance a6a0e918-425d-44de-a22b-8779e9108533 [ 1734.238171] env[62405]: INFO nova.compute.manager [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Took 59.71 seconds to build instance. [ 1734.238171] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5233dbb2-dcfe-fd9f-646b-ed7eec281e1a, 'name': SearchDatastore_Task, 'duration_secs': 0.017691} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.241020] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1734.241319] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.241516] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1734.242107] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.242107] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1734.242107] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1734.242363] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1734.242585] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1734.243309] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1734.243309] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1734.243309] env[62405]: DEBUG nova.virt.hardware [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1734.243718] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.244065] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 79548471-56f8-410c-a664-d2242541cd2a/79548471-56f8-410c-a664-d2242541cd2a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1734.245381] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592458c6-b1a9-413e-8f50-17bc9b0e0720 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.249533] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4eebca3d-6620-48e0-8082-bc87c3dcd01f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.253397] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Successfully updated port: 64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1734.264204] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5886739-b422-4748-8eeb-19bec9489193 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.269036] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1734.269036] env[62405]: value = "task-1947424" [ 1734.269036] env[62405]: _type = "Task" [ 1734.269036] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.285978] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947424, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.729481] env[62405]: DEBUG oslo_concurrency.lockutils [None req-069bf9a7-48de-4970-978f-4a79a8c4e109 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a6a0e918-425d-44de-a22b-8779e9108533" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.727s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.750197] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e92f26dd-e88f-42ab-8b5f-71e8f667c2fb tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.234s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.758102] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.758284] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.758413] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1734.783269] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947424, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.252977] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1735.283028] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947424, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.609048} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.283028] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 79548471-56f8-410c-a664-d2242541cd2a/79548471-56f8-410c-a664-d2242541cd2a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1735.283215] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1735.284417] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64d63d79-dad2-47fe-b8f7-080039585710 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.292946] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1735.292946] env[62405]: value = "task-1947425" [ 1735.292946] env[62405]: _type = "Task" [ 1735.292946] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.298717] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1735.308465] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947425, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.365829] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.366146] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.366337] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.366519] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.366711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.369750] env[62405]: INFO nova.compute.manager [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Terminating instance [ 1735.446710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.446710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.446841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.446964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.447178] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.450258] env[62405]: INFO nova.compute.manager [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Terminating instance [ 1735.477209] env[62405]: DEBUG nova.network.neutron [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.660768] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bf4c39-0e3e-4db6-ac5e-451e7a3088fc {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.670227] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7499a5-6614-4ec3-a0e3-cbf606ff3947 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.706582] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922c339c-1382-4bfb-aa34-75271a86f6af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.715049] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea92ac92-0758-4625-ab1f-ff6d2207bfc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.730197] env[62405]: DEBUG nova.compute.provider_tree [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1735.775572] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.803944] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085988} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.804187] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1735.805290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c2258e-7bcb-42cb-af09-e6ec675220ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.827552] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 79548471-56f8-410c-a664-d2242541cd2a/79548471-56f8-410c-a664-d2242541cd2a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1735.827882] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13808db5-36bf-44ea-9264-cf5a182ce96a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.850030] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1735.850030] env[62405]: value = "task-1947426" [ 1735.850030] env[62405]: _type = "Task" [ 1735.850030] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.859107] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947426, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.873832] env[62405]: DEBUG nova.compute.manager [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1735.874609] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1735.875011] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2947f4e-8b5c-4a12-89e2-ec8e4fcf18a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.883791] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1735.884054] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dea66f92-f8f5-420d-b620-d9c14cb65e32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.892375] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1735.892375] env[62405]: value = "task-1947427" [ 1735.892375] env[62405]: _type = "Task" [ 1735.892375] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.901882] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.957011] env[62405]: DEBUG nova.compute.manager [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1735.957295] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1735.958465] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d9e404-c4d0-4d37-81fe-12915ccd2f9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.967181] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1735.967479] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30639aac-e1b1-4c46-ae6d-20e3a2c5f9c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.975431] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1735.975431] env[62405]: value = "task-1947428" [ 1735.975431] env[62405]: _type = "Task" [ 1735.975431] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.979904] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.980298] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Instance network_info: |[{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1735.980786] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:c1:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64634a81-f1e1-4078-894a-2f4e8b56de13', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1735.989011] env[62405]: DEBUG oslo.service.loopingcall [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1735.989234] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1735.989524] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e1a4993-f0e5-4383-be91-456983218be3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.007920] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947428, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.015702] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.015702] env[62405]: value = "task-1947429" [ 1736.015702] env[62405]: _type = "Task" [ 1736.015702] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.024836] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947429, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.233544] env[62405]: DEBUG nova.compute.manager [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Received event network-changed-64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1736.233614] env[62405]: DEBUG nova.compute.manager [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Refreshing instance network info cache due to event network-changed-64634a81-f1e1-4078-894a-2f4e8b56de13. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1736.233890] env[62405]: DEBUG oslo_concurrency.lockutils [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] Acquiring lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.234107] env[62405]: DEBUG oslo_concurrency.lockutils [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] Acquired lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.234310] env[62405]: DEBUG nova.network.neutron [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Refreshing network info cache for port 64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.258865] env[62405]: ERROR nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [req-ae2357b1-f30b-4b7e-9885-ca34b0ca8102] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ae2357b1-f30b-4b7e-9885-ca34b0ca8102"}]} [ 1736.275934] env[62405]: DEBUG nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1736.291339] env[62405]: DEBUG nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1736.291779] env[62405]: DEBUG nova.compute.provider_tree [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.305345] env[62405]: DEBUG nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1736.325218] env[62405]: DEBUG nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1736.364861] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947426, 'name': ReconfigVM_Task, 'duration_secs': 0.42954} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.365196] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 79548471-56f8-410c-a664-d2242541cd2a/79548471-56f8-410c-a664-d2242541cd2a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1736.366172] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf86a289-eb26-4bda-b843-d01525a8b674 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.380234] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1736.380234] env[62405]: value = "task-1947430" [ 1736.380234] env[62405]: _type = "Task" [ 1736.380234] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.392320] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947430, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.403710] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947427, 'name': PowerOffVM_Task, 'duration_secs': 0.240118} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.404047] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1736.404228] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1736.404485] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd5903d8-9cf8-4a15-89f7-9bb2a0fc0bd5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.488256] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947428, 'name': PowerOffVM_Task, 'duration_secs': 0.255697} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.488569] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1736.488711] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1736.490121] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1e18685-3b53-4d94-96ec-f5eaa4a40bf8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.530048] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947429, 'name': CreateVM_Task, 'duration_secs': 0.504203} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.530048] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1736.530681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.530864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.531205] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1736.531473] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7375061f-631e-4ef9-8173-2d5f241907ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.536731] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1736.536731] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5227741d-5153-f0fd-c0e7-7be1562bf5e6" [ 1736.536731] env[62405]: _type = "Task" [ 1736.536731] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.548258] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5227741d-5153-f0fd-c0e7-7be1562bf5e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.656317] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1736.656569] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1736.656825] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleting the datastore file [datastore1] 9b495caf-4394-40c0-b68f-d02c7d759a6a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1736.657401] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2062ca52-f721-4ba7-b43c-cf0dc66ae0b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.671007] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1736.671323] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1736.671545] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleting the datastore file [datastore1] dbb5dda5-5420-4d7b-8b32-152d51cb2fb9 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1736.671917] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1736.671917] env[62405]: value = "task-1947433" [ 1736.671917] env[62405]: _type = "Task" [ 1736.671917] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.672391] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af0f3029-eb0c-463a-91e7-ccd203d837f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.685783] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.687217] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1736.687217] env[62405]: value = "task-1947434" [ 1736.687217] env[62405]: _type = "Task" [ 1736.687217] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.699843] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.765238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b25b53-f2e1-445f-82c0-ac98997e3ae0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.775733] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47f0b90-6045-4441-b34e-1bf22a8c791e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.814560] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df98156-1d65-41c1-b601-da0ae0818912 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.825231] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88395f51-ae40-43e2-a740-e343afb2e407 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.844435] env[62405]: DEBUG nova.compute.provider_tree [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.888171] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1736.889968] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b56034-9a48-4731-bd36-9983698cf103 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.898661] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1736.898860] env[62405]: ERROR oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk due to incomplete transfer. [ 1736.906712] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-59cc2321-553e-4d6c-91ec-24d1290bd2f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.909188] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947430, 'name': Rename_Task, 'duration_secs': 0.170485} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.909960] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1736.910750] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61ea720b-ec5a-40c7-bf1b-a8a6bb78188e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.918501] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a10f5-3866-d6ed-20f4-a1f524a461b8/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1736.918737] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Uploaded image e3b71764-21b0-4151-a85f-eb549854f430 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1736.921371] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1736.922982] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a8193c4f-7349-456d-9bb6-fc7062d7fe7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.924689] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1736.924689] env[62405]: value = "task-1947435" [ 1736.924689] env[62405]: _type = "Task" [ 1736.924689] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.929867] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1736.929867] env[62405]: value = "task-1947436" [ 1736.929867] env[62405]: _type = "Task" [ 1736.929867] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.935831] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947435, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.941279] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947436, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.987335] env[62405]: DEBUG nova.network.neutron [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updated VIF entry in instance network info cache for port 64634a81-f1e1-4078-894a-2f4e8b56de13. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1736.987776] env[62405]: DEBUG nova.network.neutron [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.047283] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5227741d-5153-f0fd-c0e7-7be1562bf5e6, 'name': SearchDatastore_Task, 'duration_secs': 0.024596} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.047599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.047830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1737.048079] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.048286] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.048487] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1737.049348] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6dd23cc-c3af-486d-b358-09a0030181ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.059511] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1737.059695] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1737.060428] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0983de08-af2b-43f1-a5b0-679b198883bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.066443] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1737.066443] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0a50d-6d5c-bd6b-2f4a-179c0a0a6ee3" [ 1737.066443] env[62405]: _type = "Task" [ 1737.066443] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.075155] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0a50d-6d5c-bd6b-2f4a-179c0a0a6ee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.184641] env[62405]: DEBUG oslo_vmware.api [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172876} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.184909] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1737.185112] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1737.185290] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1737.185463] env[62405]: INFO nova.compute.manager [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1737.185759] env[62405]: DEBUG oslo.service.loopingcall [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.185962] env[62405]: DEBUG nova.compute.manager [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1737.186156] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1737.195740] env[62405]: DEBUG oslo_vmware.api [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182678} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.195968] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1737.196158] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1737.196335] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1737.196506] env[62405]: INFO nova.compute.manager [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1737.196727] env[62405]: DEBUG oslo.service.loopingcall [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1737.196911] env[62405]: DEBUG nova.compute.manager [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1737.197010] env[62405]: DEBUG nova.network.neutron [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1737.396019] env[62405]: DEBUG nova.scheduler.client.report [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1737.396019] env[62405]: DEBUG nova.compute.provider_tree [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 97 to 98 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1737.396019] env[62405]: DEBUG nova.compute.provider_tree [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1737.449219] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947435, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.449755] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947436, 'name': Destroy_Task, 'duration_secs': 0.417539} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.450115] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroyed the VM [ 1737.450453] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1737.450830] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e5083e35-bb01-47e6-9622-f5fb2af8a577 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.466038] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1737.466038] env[62405]: value = "task-1947437" [ 1737.466038] env[62405]: _type = "Task" [ 1737.466038] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.478698] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947437, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.490239] env[62405]: DEBUG oslo_concurrency.lockutils [req-bb522dc1-a9c8-4851-9de5-ed4415fd1dbe req-aa1e28c2-4e2d-46ad-997f-2dee7b44ee95 service nova] Releasing lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.582880] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0a50d-6d5c-bd6b-2f4a-179c0a0a6ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.031003} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.584793] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7e5f45-8569-4631-8c48-57d7a5c834b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.592630] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1737.592630] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52980301-7b16-1f88-7834-752db938b0ed" [ 1737.592630] env[62405]: _type = "Task" [ 1737.592630] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.604767] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52980301-7b16-1f88-7834-752db938b0ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.642381] env[62405]: DEBUG nova.compute.manager [req-61f90baa-58a0-41a7-8895-8cade5a889ff req-2c9a7ed0-b9e2-4d1f-8f38-f3eb31ed20f8 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Received event network-vif-deleted-eba12fff-0f73-414b-bcdd-e9abed9edc58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1737.642749] env[62405]: INFO nova.compute.manager [req-61f90baa-58a0-41a7-8895-8cade5a889ff req-2c9a7ed0-b9e2-4d1f-8f38-f3eb31ed20f8 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Neutron deleted interface eba12fff-0f73-414b-bcdd-e9abed9edc58; detaching it from the instance and deleting it from the info cache [ 1737.643280] env[62405]: DEBUG nova.network.neutron [req-61f90baa-58a0-41a7-8895-8cade5a889ff req-2c9a7ed0-b9e2-4d1f-8f38-f3eb31ed20f8 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.901914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.707s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.902551] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1737.906159] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.443s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.906391] env[62405]: DEBUG nova.objects.instance [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid f0ca0d3d-cb2b-467b-a466-c270794055d7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1737.936295] env[62405]: DEBUG oslo_vmware.api [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947435, 'name': PowerOnVM_Task, 'duration_secs': 0.784161} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.936562] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1737.936812] env[62405]: INFO nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1737.936993] env[62405]: DEBUG nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1737.937760] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796b154a-6ae7-4d86-b9ce-20fbb0ae4874 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.971440] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.975856] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947437, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.039033] env[62405]: DEBUG nova.network.neutron [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.103646] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52980301-7b16-1f88-7834-752db938b0ed, 'name': SearchDatastore_Task, 'duration_secs': 0.014278} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.104078] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.104352] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1738.104611] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37b70e69-772f-4f17-a64b-1512135b5afb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.113442] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1738.113442] env[62405]: value = "task-1947438" [ 1738.113442] env[62405]: _type = "Task" [ 1738.113442] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.122203] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.146223] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01a796d8-0744-4f8c-99dc-7f7f57e06c28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.155904] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c41b579-4a4b-476d-a795-8afd123c7ea8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.206357] env[62405]: DEBUG nova.compute.manager [req-61f90baa-58a0-41a7-8895-8cade5a889ff req-2c9a7ed0-b9e2-4d1f-8f38-f3eb31ed20f8 service nova] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Detach interface failed, port_id=eba12fff-0f73-414b-bcdd-e9abed9edc58, reason: Instance dbb5dda5-5420-4d7b-8b32-152d51cb2fb9 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1738.259337] env[62405]: DEBUG nova.compute.manager [req-c73dd530-1e35-41cb-b84f-cc0c570f1698 req-3d06178a-ca9b-41c1-8ac5-a025bb786aad service nova] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Received event network-vif-deleted-74bb12c4-3ef6-4bc6-b5ea-810282fe3f43 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1738.411661] env[62405]: DEBUG nova.compute.utils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1738.416182] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1738.416358] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1738.459890] env[62405]: INFO nova.compute.manager [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Took 60.81 seconds to build instance. [ 1738.477776] env[62405]: INFO nova.compute.manager [-] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Took 1.29 seconds to deallocate network for instance. [ 1738.478118] env[62405]: DEBUG oslo_vmware.api [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947437, 'name': RemoveSnapshot_Task, 'duration_secs': 0.543126} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.482139] env[62405]: DEBUG nova.policy [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f003aed5f864a8f933767606ae1f317', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '633b4e729a054bc69593b789af9ee070', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1738.484786] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1738.484996] env[62405]: INFO nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 16.46 seconds to snapshot the instance on the hypervisor. [ 1738.542245] env[62405]: INFO nova.compute.manager [-] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Took 1.34 seconds to deallocate network for instance. [ 1738.627926] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.868516] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b403753-331e-40f1-8d17-de900c57a115 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.878504] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b485719b-a9b0-410e-80d4-339389b736ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.915244] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a352c8e-049c-4bf8-ad29-4517ce331535 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.918473] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1738.930048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df50893b-fd2f-434f-b334-7e10537c5659 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.948605] env[62405]: DEBUG nova.compute.provider_tree [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.963283] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f69f501-45bd-4c8d-8379-84744a1a9aba tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.322s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.990970] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Successfully created port: 995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1738.997776] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.049453] env[62405]: DEBUG nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Found 3 images (rotation: 2) {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1739.049665] env[62405]: DEBUG nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Rotating out 1 backups {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1739.049829] env[62405]: DEBUG nova.compute.manager [None req-8b59a3e3-61d0-469c-bb7b-a8a5014805bd tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleting image 6ef894cb-7aec-49b0-9d6b-4b554296fb09 {{(pid=62405) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1739.052623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.129184] env[62405]: DEBUG oslo_vmware.api [None 
req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947438, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.452464] env[62405]: DEBUG nova.scheduler.client.report [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1739.465085] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1739.629221] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947438, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.07718} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.629221] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1739.629221] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1739.629221] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1cb91d49-9d03-486e-b749-04c5e4fcd1f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.636058] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1739.636058] env[62405]: value = "task-1947439" [ 1739.636058] env[62405]: _type = "Task" [ 1739.636058] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.645232] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.928333] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1739.953365] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1739.953629] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1739.953784] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1739.953965] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1739.954125] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1739.954279] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1739.954485] env[62405]: DEBUG 
nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1739.954644] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1739.954811] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1739.954971] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1739.955156] env[62405]: DEBUG nova.virt.hardware [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1739.956072] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf3f8e3-7ed7-41b9-b202-92a14c083e18 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.959437] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.962331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.038s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.962331] env[62405]: DEBUG nova.objects.instance [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lazy-loading 'resources' on Instance uuid 6213702e-8e39-4342-b62f-2c9495017bf9 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.971356] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33b2e0e-f639-4df2-a67f-7a02d792e51b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.989656] env[62405]: INFO nova.scheduler.client.report [None 
req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance f0ca0d3d-cb2b-467b-a466-c270794055d7 [ 1739.991073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.146672] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073251} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.146993] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1740.147798] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65653d70-e679-44d3-8f0e-d94f084b8d28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.170675] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1740.170984] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0153039a-4341-4d4f-9f28-f41e4e87f21a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.192284] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1740.192284] env[62405]: value = "task-1947440" [ 1740.192284] env[62405]: _type = "Task" [ 1740.192284] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.200363] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947440, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.497982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f349824b-af14-4f91-8e16-cda5a76aaccb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "f0ca0d3d-cb2b-467b-a466-c270794055d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.800s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.550657] env[62405]: DEBUG nova.compute.manager [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1740.551561] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4008df87-946f-42c7-9863-7d24d7e65204 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.707186] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947440, 'name': ReconfigVM_Task, 'duration_secs': 0.296359} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.707476] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1740.708165] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15a3402b-7e25-4785-967f-c665fb208283 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.715887] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1740.715887] env[62405]: value = "task-1947441" [ 1740.715887] env[62405]: _type = "Task" [ 1740.715887] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.723962] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947441, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.850746] env[62405]: DEBUG nova.compute.manager [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Received event network-vif-plugged-995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1740.850969] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.851866] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.851866] env[62405]: DEBUG oslo_concurrency.lockutils [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.851866] env[62405]: DEBUG nova.compute.manager [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] No waiting events found dispatching network-vif-plugged-995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1740.851866] env[62405]: WARNING nova.compute.manager [req-d1d7fd60-ed25-4694-946d-2d6f190b078a req-cc20e987-59a9-4dc9-aa7b-1d76e9cf16c4 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Received unexpected event network-vif-plugged-995727bb-89db-40f7-a02b-916afa2c9641 for instance with vm_state building and task_state spawning. 
[ 1740.927316] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f05979-564a-49ad-abbb-99d3adbeeb4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.935890] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc14b06-2753-4fcc-a6ed-a935f752b2dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.966984] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b933f9-d371-49ad-8397-16410c31a062 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.972027] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Successfully updated port: 995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1740.977061] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f050537-11a9-4f06-8010-563382cd98e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.994031] env[62405]: DEBUG nova.compute.provider_tree [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.064836] env[62405]: INFO nova.compute.manager [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] instance snapshotting [ 1741.067775] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ed6aa3-609e-4034-8849-f7b08f927b89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.087368] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffc55cd-7b28-44c0-a405-c2762499eed3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.228204] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947441, 'name': Rename_Task, 'duration_secs': 0.15531} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.228491] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1741.228811] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fddae57a-316f-4d00-9e04-3e1c41f8ed51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.236726] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1741.236726] env[62405]: value = "task-1947442" [ 1741.236726] env[62405]: _type = "Task" [ 1741.236726] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.245369] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947442, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.479485] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.479485] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.479485] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1741.497465] env[62405]: DEBUG nova.scheduler.client.report [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1741.600270] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1741.601426] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4f34f2ee-dd03-438a-bcea-c9dc68cefda6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.616257] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1741.616257] env[62405]: value = "task-1947443" [ 1741.616257] env[62405]: _type = "Task" [ 1741.616257] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.628040] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947443, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.749926] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947442, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.005026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.007356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.432s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.007592] env[62405]: DEBUG nova.objects.instance [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lazy-loading 'resources' on Instance uuid 377365a4-7538-4bab-a181-1940e6fb4066 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.022137] env[62405]: INFO nova.scheduler.client.report [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Deleted allocations for instance 6213702e-8e39-4342-b62f-2c9495017bf9 [ 1742.027059] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1742.126482] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947443, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.246674] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.246982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.252037] env[62405]: DEBUG oslo_vmware.api [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947442, 'name': PowerOnVM_Task, 'duration_secs': 0.696495} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.252295] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1742.252503] env[62405]: INFO nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Took 8.06 seconds to spawn the instance on the hypervisor. 
[ 1742.252687] env[62405]: DEBUG nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1742.253610] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0990072e-0948-4d10-b88e-dd983e701775 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.286860] env[62405]: DEBUG nova.network.neutron [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.532760] env[62405]: DEBUG oslo_concurrency.lockutils [None req-191757c6-f44a-4279-9f83-a52c84b6e0dc tempest-ServerRescueTestJSONUnderV235-2078612695 tempest-ServerRescueTestJSONUnderV235-2078612695-project-member] Lock "6213702e-8e39-4342-b62f-2c9495017bf9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.647s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.633819] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947443, 'name': CreateSnapshot_Task, 'duration_secs': 0.779894} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.634145] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1742.634905] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3049ae0-fda2-4944-8e4c-ca0bf0000cbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.774402] env[62405]: INFO nova.compute.manager [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Took 60.76 seconds to build instance. [ 1742.791203] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.791524] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance network_info: |[{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1742.792171] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:17:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '995727bb-89db-40f7-a02b-916afa2c9641', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1742.800187] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Creating folder: Project (633b4e729a054bc69593b789af9ee070). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1742.803092] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca031c1b-3222-4113-8516-a1a6422399cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.806189] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.806189] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.806189] env[62405]: DEBUG nova.compute.manager [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1742.806726] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775155b2-c14a-45e9-af55-4b44bdc48c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.814797] env[62405]: DEBUG nova.compute.manager [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1742.814910] env[62405]: DEBUG nova.objects.instance [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.820681] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Created folder: Project (633b4e729a054bc69593b789af9ee070) in parent group-v401284. [ 1742.820891] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Creating folder: Instances. Parent ref: group-v401469. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1742.821163] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47d2939b-58b3-499f-813a-2d112cee219f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.832899] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Created folder: Instances in parent group-v401469. [ 1742.833167] env[62405]: DEBUG oslo.service.loopingcall [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.835813] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1742.836582] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d494968b-38ac-4b4b-b894-349d56c2df9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.862547] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1742.862547] env[62405]: value = "task-1947446" [ 1742.862547] env[62405]: _type = "Task" [ 1742.862547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.874153] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947446, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.888602] env[62405]: DEBUG nova.compute.manager [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Received event network-changed-995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1742.888865] env[62405]: DEBUG nova.compute.manager [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Refreshing instance network info cache due to event network-changed-995727bb-89db-40f7-a02b-916afa2c9641. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1742.889833] env[62405]: DEBUG oslo_concurrency.lockutils [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.889919] env[62405]: DEBUG oslo_concurrency.lockutils [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.890082] env[62405]: DEBUG nova.network.neutron [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Refreshing network info cache for port 995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1743.003922] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cfe0aa-7362-4edf-b0f5-17303a8df6e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.014582] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dde957a-6574-4ef8-9b65-7a89c1deda0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.064162] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b55374-cdc5-4fc6-a6e1-dca0aac721a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.074802] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919b6a41-e813-4e28-9a00-94edeac07bfa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.095137] env[62405]: DEBUG nova.compute.provider_tree [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1743.154924] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1743.155257] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bfcda9e7-3b65-43fc-b52b-a9aee289414a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.167088] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1743.167088] env[62405]: value = "task-1947447" [ 1743.167088] env[62405]: _type = "Task" [ 1743.167088] env[62405]: } 
to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.176302] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947447, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.276502] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73a852c3-37da-4b89-821f-256fb85b1071 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.973s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.374340] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947446, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.598924] env[62405]: DEBUG nova.scheduler.client.report [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1743.678525] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947447, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.749883] env[62405]: DEBUG nova.network.neutron [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updated VIF entry in instance network info cache for port 995727bb-89db-40f7-a02b-916afa2c9641. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1743.750036] env[62405]: DEBUG nova.network.neutron [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.779225] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1743.823593] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1743.823923] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62771767-3934-4705-9a0a-a20b720f2b63 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.833775] env[62405]: DEBUG oslo_vmware.api [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1743.833775] env[62405]: value = "task-1947448" [ 1743.833775] env[62405]: _type = "Task" [ 1743.833775] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.845837] env[62405]: DEBUG oslo_vmware.api [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947448, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.874748] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947446, 'name': CreateVM_Task, 'duration_secs': 0.758819} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.874987] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1743.875722] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.875913] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.876265] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1743.877030] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d4dea15-ebce-4a92-ad4a-50c218123b5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.882022] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1743.882022] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f6b8c5-770d-e0a4-fe64-a83e8fd1955a" [ 1743.882022] env[62405]: _type = "Task" [ 1743.882022] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.892336] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f6b8c5-770d-e0a4-fe64-a83e8fd1955a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.106687] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.099s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.109359] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.863s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.110669] env[62405]: INFO nova.compute.claims [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1744.130101] env[62405]: INFO nova.scheduler.client.report [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Deleted allocations for instance 377365a4-7538-4bab-a181-1940e6fb4066 [ 1744.189725] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947447, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.252693] env[62405]: DEBUG oslo_concurrency.lockutils [req-3464e61b-42ce-4024-8d7f-b2f3383f8416 req-5cb028c5-fc1f-42be-a4fb-e195fe77aa94 service nova] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.304068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.345811] env[62405]: DEBUG oslo_vmware.api [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947448, 'name': PowerOffVM_Task, 'duration_secs': 0.236999} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.346141] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1744.346346] env[62405]: DEBUG nova.compute.manager [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1744.347120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc3dd6e-5718-46d4-9087-26ef12ad37f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.393386] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f6b8c5-770d-e0a4-fe64-a83e8fd1955a, 'name': SearchDatastore_Task, 'duration_secs': 0.009819} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.393712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.394225] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1744.394225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.394343] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.394502] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1744.394771] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fe83299-4a76-48f1-8b59-e1d604946745 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.404211] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1744.404408] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1744.405209] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00303e7b-4616-4a15-a7fe-5262f896ee07 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.411433] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1744.411433] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528e9995-66e9-e5ae-6281-67e35dfc7a76" [ 1744.411433] env[62405]: _type = "Task" [ 1744.411433] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.419349] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528e9995-66e9-e5ae-6281-67e35dfc7a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.640814] env[62405]: DEBUG oslo_concurrency.lockutils [None req-436e8c88-ca82-462a-9aec-7f7d0c3c9ecf tempest-ServerMetadataTestJSON-671122645 tempest-ServerMetadataTestJSON-671122645-project-member] Lock "377365a4-7538-4bab-a181-1940e6fb4066" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.926s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.680438] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947447, 'name': CloneVM_Task, 'duration_secs': 1.213456} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.680882] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Created linked-clone VM from snapshot [ 1744.682027] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da785f60-cf09-44e1-b3fa-3f71971a0ae1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.690626] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Uploading image 1e2c6626-50b1-4468-a3b7-982412fb92f3 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1744.716050] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1744.716050] env[62405]: value = "vm-401472" [ 1744.716050] env[62405]: _type = "VirtualMachine" [ 1744.716050] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1744.716327] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-050a7941-10bd-4d1f-8320-920371564855 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.725452] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease: (returnval){ [ 1744.725452] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f08a2d-ac9a-11f1-f06b-1b5463b2b417" [ 1744.725452] env[62405]: _type = "HttpNfcLease" [ 1744.725452] env[62405]: } obtained for exporting VM: (result){ [ 1744.725452] env[62405]: value = "vm-401472" [ 1744.725452] env[62405]: _type = "VirtualMachine" [ 1744.725452] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1744.725750] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the lease: (returnval){ [ 1744.725750] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f08a2d-ac9a-11f1-f06b-1b5463b2b417" [ 1744.725750] env[62405]: _type = "HttpNfcLease" [ 1744.725750] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1744.733629] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1744.733629] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f08a2d-ac9a-11f1-f06b-1b5463b2b417" [ 1744.733629] env[62405]: _type = "HttpNfcLease" [ 1744.733629] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1744.860970] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c81ef7e5-9199-4254-acb8-208fedc04ee1 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.924608] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528e9995-66e9-e5ae-6281-67e35dfc7a76, 'name': SearchDatastore_Task, 'duration_secs': 0.012109} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.925468] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51d58f51-1d2e-46e2-a9ec-fbdb491cc586 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.933218] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1744.933218] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f8c632-ef97-1212-ff08-eb5ba6c40d5a" [ 1744.933218] env[62405]: _type = "Task" [ 1744.933218] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.944690] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f8c632-ef97-1212-ff08-eb5ba6c40d5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.239399] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1745.239399] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f08a2d-ac9a-11f1-f06b-1b5463b2b417" [ 1745.239399] env[62405]: _type = "HttpNfcLease" [ 1745.239399] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1745.239753] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1745.239753] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f08a2d-ac9a-11f1-f06b-1b5463b2b417" [ 1745.239753] env[62405]: _type = "HttpNfcLease" [ 1745.239753] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1745.240569] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ba5edf-313c-4ce9-952f-a85ac8ee64de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.254501] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1745.254872] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1745.443840] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a576c87b-51cd-40cb-90cf-dd0c4e8e79d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.453287] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f8c632-ef97-1212-ff08-eb5ba6c40d5a, 'name': SearchDatastore_Task, 'duration_secs': 0.012432} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.453645] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.453934] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d186b2f4-3fd1-44be-b8a4-080972aff3a0/d186b2f4-3fd1-44be-b8a4-080972aff3a0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1745.454246] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec7f74f8-1a59-41ed-b40d-3455a6773f20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.468885] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1745.468885] env[62405]: value = "task-1947450" [ 1745.468885] env[62405]: _type = "Task" [ 1745.468885] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.480890] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947450, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.679856] env[62405]: DEBUG nova.compute.manager [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1745.726478] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999039b2-a223-425a-b2f9-370e1a7cac4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.737544] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad07817d-eb1a-4417-82ef-b6370e7015ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.774920] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f8387d-957a-4a70-9d6f-8d086a777989 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.788166] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d411bd8-e5e2-4801-9e0c-653bf0e52017 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.803471] env[62405]: DEBUG nova.compute.provider_tree [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1745.990356] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947450, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.214393] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.331161] env[62405]: ERROR nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [req-0e9da4eb-66f3-42b5-969e-5638ac0cf300] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0e9da4eb-66f3-42b5-969e-5638ac0cf300"}]} [ 1746.349696] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1746.368326] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1746.368461] env[62405]: DEBUG nova.compute.provider_tree [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1746.391361] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a 
tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1746.414261] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1746.485878] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947450, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.591474] env[62405]: DEBUG nova.compute.manager [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Stashing vm_state: stopped {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1746.915672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d471d429-9432-47f2-a6e1-b24fdb09aec1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.926235] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c730b315-113c-4423-9d0e-74dcba0ca41c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.961536] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64782618-a46f-4085-8c7f-510b17bea4f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.972958] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2749eb80-987e-49f0-a55d-4552bfad4500 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.995931] env[62405]: DEBUG nova.compute.provider_tree [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1747.000454] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 
tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947450, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.126251] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.489056] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947450, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.614194} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.489523] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d186b2f4-3fd1-44be-b8a4-080972aff3a0/d186b2f4-3fd1-44be-b8a4-080972aff3a0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1747.490475] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1747.490781] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b77b57b6-379e-49c7-806c-1ca5ff3db3f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.505020] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1747.505020] env[62405]: value = "task-1947451" [ 1747.505020] env[62405]: _type = "Task" [ 1747.505020] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.520394] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947451, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.525298] env[62405]: ERROR nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [req-6bbb0e11-8520-4813-8306-5901580b8d5f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6bbb0e11-8520-4813-8306-5901580b8d5f"}]} [ 1747.558738] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1747.574476] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1747.574720] env[62405]: DEBUG nova.compute.provider_tree [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1747.588289] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1747.613530] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a 
tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1747.812707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.812986] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.023875] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078326} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.023875] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1748.025804] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073ec0a4-7bcb-431d-aac6-f1b69a604526 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.068577] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] d186b2f4-3fd1-44be-b8a4-080972aff3a0/d186b2f4-3fd1-44be-b8a4-080972aff3a0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1748.071685] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e21a69e0-b17e-4832-9287-d311316c14b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.096075] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1748.096075] env[62405]: value = "task-1947452" [ 1748.096075] env[62405]: _type = "Task" [ 1748.096075] env[62405]: 
} to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.110858] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947452, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.202038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4b2bfa-ccd9-495d-b5d7-db082d08fff8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.211037] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11597a4a-38e5-4aa8-8d67-18d95d690ef4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.246568] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1b3277-3c57-43d4-98b2-dd22d55dd002 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.255766] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55aefa99-ac3a-4fbc-b54b-1181285b7f0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.271899] env[62405]: DEBUG nova.compute.provider_tree [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.613324] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947452, 'name': ReconfigVM_Task, 'duration_secs': 0.470769} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.613921] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfigured VM instance instance-0000003d to attach disk [datastore1] d186b2f4-3fd1-44be-b8a4-080972aff3a0/d186b2f4-3fd1-44be-b8a4-080972aff3a0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1748.614858] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58641c24-401b-4081-943a-8144d5bbaf17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.625713] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1748.625713] env[62405]: value = "task-1947453" [ 1748.625713] env[62405]: _type = "Task" [ 1748.625713] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.636483] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947453, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.775174] env[62405]: DEBUG nova.scheduler.client.report [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1749.139538] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947453, 'name': Rename_Task, 'duration_secs': 0.272836} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.140675] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1749.140806] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea889489-b52f-44f7-8054-cb0e7f7d9de6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.154267] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1749.154267] env[62405]: value = "task-1947454" [ 1749.154267] env[62405]: _type = "Task" [ 1749.154267] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.162500] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947454, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.283046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.172s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.283046] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1749.284700] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 54.614s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.285123] env[62405]: DEBUG nova.objects.instance [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lazy-loading 'resources' on Instance uuid 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1749.663148] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947454, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.788693] env[62405]: DEBUG nova.compute.utils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.790072] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1749.790285] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.880078] env[62405]: DEBUG nova.policy [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5eb8bdf5d3f34ce4ad48aba0697cfd4f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a497089c23946fd97e9f5061ef34ff1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1750.167079] env[62405]: DEBUG oslo_vmware.api [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947454, 'name': PowerOnVM_Task, 'duration_secs': 0.916084} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.172099] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1750.172099] env[62405]: INFO nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Took 10.24 seconds to spawn the instance on the hypervisor. [ 1750.172099] env[62405]: DEBUG nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1750.172260] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb851f87-d9a2-43d2-8a27-86f89bf96041 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.296143] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1750.324088] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df17b479-f5ee-4e46-ae3f-9fd3cb5927fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.332968] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898a867a-988d-43a8-81bf-e9e5f56d4c8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.365421] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0551848-3dbb-4024-b0bc-c54b8262f16e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.373694] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c75ff0-084d-454c-b30f-cfdfb9a694d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.389660] env[62405]: DEBUG nova.compute.provider_tree [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.512308] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Successfully created port: 637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1750.697867] env[62405]: INFO nova.compute.manager [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Took 64.97 seconds to build instance. 
[ 1750.895110] env[62405]: DEBUG nova.scheduler.client.report [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1751.200378] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c00fb802-6540-4646-ad2b-94e7ab1842da tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.465s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.311748] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1751.340487] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1751.340814] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.341040] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1751.341248] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Flavor pref 0:0:0 
{{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.341404] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1751.341557] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1751.341767] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1751.341927] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1751.342104] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1751.342270] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1751.342443] env[62405]: DEBUG nova.virt.hardware [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1751.343712] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc8d4ce-71a5-4f8b-894e-b5359fa1c8c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.354314] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7959de-4475-449c-a05a-8f73ed067e6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.400274] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1751.403382] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 53.206s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.404971] env[62405]: INFO nova.compute.claims [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.425149] env[62405]: INFO nova.scheduler.client.report [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Deleted allocations for instance 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73 [ 1751.702608] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1751.933419] env[62405]: DEBUG oslo_concurrency.lockutils [None req-72b686d3-fd8d-4c4c-81cd-baf36262a0bb tempest-ServerShowV247Test-1482326983 tempest-ServerShowV247Test-1482326983-project-member] Lock "0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 61.644s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.273956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.349107] env[62405]: DEBUG nova.compute.manager [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Received event network-vif-plugged-637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1752.349343] env[62405]: DEBUG oslo_concurrency.lockutils [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd service nova] Acquiring lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.349659] env[62405]: DEBUG oslo_concurrency.lockutils [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd service nova] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.349763] env[62405]: DEBUG oslo_concurrency.lockutils [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd 
service nova] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.349898] env[62405]: DEBUG nova.compute.manager [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] No waiting events found dispatching network-vif-plugged-637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1752.350078] env[62405]: WARNING nova.compute.manager [req-23d3a5e1-ba27-4f15-abd8-448aaa8be2a9 req-83847328-4200-4f7f-b1b0-499eb86ce1bd service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Received unexpected event network-vif-plugged-637b6253-d82e-4e82-afe0-7ee5f1d4351f for instance with vm_state building and task_state spawning. [ 1752.397101] env[62405]: DEBUG nova.compute.manager [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Received event network-changed-995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1752.397343] env[62405]: DEBUG nova.compute.manager [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Refreshing instance network info cache due to event network-changed-995727bb-89db-40f7-a02b-916afa2c9641. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1752.397593] env[62405]: DEBUG oslo_concurrency.lockutils [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.397769] env[62405]: DEBUG oslo_concurrency.lockutils [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.398315] env[62405]: DEBUG nova.network.neutron [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Refreshing network info cache for port 995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.422553] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Successfully updated port: 637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1752.872360] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfefd35-954e-4924-a1be-27663a7f2ea5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.884343] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d1294e0e-3f33-482c-bbb2-127f0addb2a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.921340] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1906581e-2dd8-4529-9c5e-ac822457a2cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.924375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.924553] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.924858] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.933698] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a905cfb-3cf3-4d31-a0a2-5ee5a76ad9ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.950224] env[62405]: DEBUG nova.compute.provider_tree [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.299991] env[62405]: DEBUG nova.network.neutron [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updated VIF entry in instance network info cache for port 995727bb-89db-40f7-a02b-916afa2c9641. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1753.300386] env[62405]: DEBUG nova.network.neutron [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.453558] env[62405]: DEBUG nova.scheduler.client.report [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1753.468433] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1753.626897] env[62405]: DEBUG nova.network.neutron [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Updating instance_info_cache with network_info: [{"id": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "address": "fa:16:3e:29:10:cd", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637b6253-d8", "ovs_interfaceid": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.803934] env[62405]: DEBUG oslo_concurrency.lockutils [req-16dbf1cf-2ccb-4315-bd03-942d95464a91 req-fa1fbb0e-3088-4c6d-a427-e43281489371 service nova] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.962979] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.963540] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1753.966387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.437s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.966616] env[62405]: DEBUG nova.objects.instance [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lazy-loading 'resources' on Instance uuid b4693268-4d12-4c96-a8f9-7b1bb9705c89 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.130380] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.130760] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Instance network_info: |[{"id": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "address": "fa:16:3e:29:10:cd", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637b6253-d8", "ovs_interfaceid": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1754.131227] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:10:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '637b6253-d82e-4e82-afe0-7ee5f1d4351f', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1754.139613] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Creating folder: Project (4a497089c23946fd97e9f5061ef34ff1). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1754.140333] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e310d700-8d4a-4a8a-be9a-bb09259d5189 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.157800] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Created folder: Project (4a497089c23946fd97e9f5061ef34ff1) in parent group-v401284. [ 1754.158035] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Creating folder: Instances. Parent ref: group-v401473. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1754.158311] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c9efbd7-4bb5-4b90-a185-b445881511bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.168498] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Created folder: Instances in parent group-v401473. [ 1754.168749] env[62405]: DEBUG oslo.service.loopingcall [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.168942] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1754.169171] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ddc0ab3-a1c0-42e1-bccf-b2985887ea11 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.189587] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1754.189587] env[62405]: value = "task-1947457" [ 1754.189587] env[62405]: _type = "Task" [ 1754.189587] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.198596] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947457, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.409856] env[62405]: DEBUG nova.compute.manager [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Received event network-changed-637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1754.410113] env[62405]: DEBUG nova.compute.manager [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Refreshing instance network info cache due to event network-changed-637b6253-d82e-4e82-afe0-7ee5f1d4351f. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1754.410474] env[62405]: DEBUG oslo_concurrency.lockutils [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] Acquiring lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.410474] env[62405]: DEBUG oslo_concurrency.lockutils [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] Acquired lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.410590] env[62405]: DEBUG nova.network.neutron [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Refreshing network info cache for port 637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.470669] env[62405]: DEBUG nova.compute.utils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.477274] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1754.477274] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.548807] env[62405]: DEBUG nova.policy [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c03d02dd18340dd918af29e8a123688', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4eedd384ae594483a29ed0070c3be0f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1754.702330] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947457, 'name': CreateVM_Task, 'duration_secs': 0.472115} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.703029] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1754.703195] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.703354] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.703874] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1754.704073] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99433ae3-98ba-482f-8e86-07bd24538adc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.709780] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1754.709780] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e032a4-6a22-9683-e265-5814b9f72791" [ 
1754.709780] env[62405]: _type = "Task" [ 1754.709780] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.723506] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e032a4-6a22-9683-e265-5814b9f72791, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.975273] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2601c812-c019-4026-86be-e899426c1466 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.979152] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1754.991100] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691b0092-e82f-4aae-b01f-39bfdbf073ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.030987] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Successfully created port: 521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1755.036307] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec2b3a5-6ff2-473f-bbf4-5192e9051dbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.046065] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d31e9d9-532b-4340-ba2f-119c533127e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.062685] env[62405]: DEBUG nova.compute.provider_tree [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1755.121804] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1755.123050] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18f166b-a672-4628-b489-e3a673d76d67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.133638] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1755.133888] env[62405]: ERROR oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk due to incomplete transfer. [ 1755.134154] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-140b2c36-4310-4af1-a9e3-260e0a5b555a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.145318] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52030c9f-a913-029a-c38a-fdb7a5ab3962/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1755.145666] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Uploaded image 1e2c6626-50b1-4468-a3b7-982412fb92f3 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1755.148126] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1755.148410] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4b7af403-9a9e-4d61-8865-60d0635d5421 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.159162] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1755.159162] env[62405]: value = "task-1947458" [ 1755.159162] env[62405]: _type = "Task" [ 1755.159162] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.170646] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947458, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.217216] env[62405]: DEBUG nova.network.neutron [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Updated VIF entry in instance network info cache for port 637b6253-d82e-4e82-afe0-7ee5f1d4351f. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1755.217611] env[62405]: DEBUG nova.network.neutron [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Updating instance_info_cache with network_info: [{"id": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "address": "fa:16:3e:29:10:cd", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637b6253-d8", "ovs_interfaceid": "637b6253-d82e-4e82-afe0-7ee5f1d4351f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.225845] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e032a4-6a22-9683-e265-5814b9f72791, 'name': SearchDatastore_Task, 'duration_secs': 0.012792} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.226185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.226469] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1755.226711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.226864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.228729] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1755.228729] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf4b46ea-99ec-4b9a-8dfb-6b5b760a4c1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.240141] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1755.240346] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1755.241177] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2fdddf7-9eb4-4833-b58d-11f4cb09bdff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.247440] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1755.247440] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c68f5-6b9d-448b-207b-0e5233bc6e0f" [ 1755.247440] env[62405]: _type = "Task" [ 1755.247440] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.255592] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c68f5-6b9d-448b-207b-0e5233bc6e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.567248] env[62405]: DEBUG nova.scheduler.client.report [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1755.669634] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947458, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.720829] env[62405]: DEBUG oslo_concurrency.lockutils [req-acc604d8-5cd0-4e98-b80c-fa5c7cd01297 req-c90dd652-4692-4866-b49e-8fe4051867c2 service nova] Releasing lock "refresh_cache-46240f5b-c6ab-481b-b20c-80cc727a79f4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.757480] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c68f5-6b9d-448b-207b-0e5233bc6e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.043862} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.758243] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07bad229-253d-4113-bbc2-ad50264f687e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.763230] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1755.763230] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52204cd8-1bf5-d66b-b9ba-e33dd59a398a" [ 1755.763230] env[62405]: _type = "Task" [ 1755.763230] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.770469] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52204cd8-1bf5-d66b-b9ba-e33dd59a398a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.991894] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1756.021224] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1756.021603] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1756.021660] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1756.021827] env[62405]: DEBUG nova.virt.hardware [None 
req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1756.022188] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1756.022367] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1756.022578] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1756.022742] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1756.023037] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1756.023081] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1756.023252] env[62405]: DEBUG nova.virt.hardware [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1756.024197] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f51f68-a744-4a79-a498-fc1561e020a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.033475] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a4a604-402e-4fb0-b535-f96b9cbdd161 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.072280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.106s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.074529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.447s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.074766] env[62405]: DEBUG nova.objects.instance [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'resources' on Instance uuid 6c6a3974-c87e-47ed-a025-d6221a8decd7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.097986] env[62405]: INFO nova.scheduler.client.report [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Deleted allocations for instance b4693268-4d12-4c96-a8f9-7b1bb9705c89 [ 1756.170376] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947458, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.274672] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52204cd8-1bf5-d66b-b9ba-e33dd59a398a, 'name': SearchDatastore_Task, 'duration_secs': 0.037069} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.274853] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.275099] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 46240f5b-c6ab-481b-b20c-80cc727a79f4/46240f5b-c6ab-481b-b20c-80cc727a79f4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1756.275366] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e63b2e9-cbe1-4837-ad08-49be77bf7e06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.283037] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1756.283037] env[62405]: value = "task-1947459" [ 1756.283037] env[62405]: _type = "Task" [ 1756.283037] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.291293] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.578868] env[62405]: DEBUG nova.objects.instance [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'numa_topology' on Instance uuid 6c6a3974-c87e-47ed-a025-d6221a8decd7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1756.604663] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42e76222-99f8-4abe-b058-8b52c8b5f5b9 tempest-ServersNegativeTestMultiTenantJSON-413040588 tempest-ServersNegativeTestMultiTenantJSON-413040588-project-member] Lock "b4693268-4d12-4c96-a8f9-7b1bb9705c89" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.811s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.673341] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947458, 'name': Destroy_Task, 'duration_secs': 1.187999} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.673694] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Destroyed the VM [ 1756.673965] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1756.674252] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fe361dc7-5135-45c3-b199-62a3e8fab57b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.683393] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1756.683393] env[62405]: value = "task-1947460" [ 1756.683393] env[62405]: _type = "Task" [ 1756.683393] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.694910] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947460, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.799942] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466739} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.800275] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 46240f5b-c6ab-481b-b20c-80cc727a79f4/46240f5b-c6ab-481b-b20c-80cc727a79f4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1756.800535] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1756.800853] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68074b45-d8af-4d33-bac5-170bc43f0f2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.815805] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1756.815805] env[62405]: value = "task-1947461" [ 1756.815805] env[62405]: _type = "Task" [ 1756.815805] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.823861] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947461, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.017247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.017511] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.081470] env[62405]: DEBUG nova.objects.base [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Object Instance<6c6a3974-c87e-47ed-a025-d6221a8decd7> lazy-loaded attributes: resources,numa_topology {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1757.196579] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947460, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.295223] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Successfully updated port: 521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.302779] env[62405]: DEBUG nova.compute.manager [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Received event network-vif-plugged-521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1757.302878] env[62405]: DEBUG oslo_concurrency.lockutils [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] Acquiring lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.303803] env[62405]: DEBUG oslo_concurrency.lockutils [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.303955] env[62405]: DEBUG oslo_concurrency.lockutils [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.304184] env[62405]: DEBUG nova.compute.manager [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] No waiting events found dispatching network-vif-plugged-521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1757.304355] env[62405]: WARNING nova.compute.manager [req-c8fbfd4f-c5c2-41fb-88af-da125bb3b307 req-855fb0c4-8c3f-4380-a5e2-a58c4a739d61 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Received unexpected event network-vif-plugged-521b75bc-2756-4546-b0b5-969b4ac5d538 for instance with vm_state building and task_state spawning. [ 1757.327138] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063291} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.327972] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1757.328812] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678d6130-4798-4c6b-823c-9d131f6652ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.356705] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 46240f5b-c6ab-481b-b20c-80cc727a79f4/46240f5b-c6ab-481b-b20c-80cc727a79f4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1757.359789] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-458dd0e1-4a07-40ec-b93e-b114c6eeb695 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.381694] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1757.381694] env[62405]: value = "task-1947462" [ 1757.381694] env[62405]: _type = "Task" [ 1757.381694] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.390628] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947462, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.619911] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b698628-a383-48ba-97ef-26ddf7f20595 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.628665] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f299ef-902a-4d4b-9310-a097931a2acd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.659674] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef38e30-8ac2-4db7-bb63-c8e91bffff63 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.667966] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5779580c-a16c-45f3-b4d5-b64c03344827 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.681774] env[62405]: DEBUG nova.compute.provider_tree [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.694523] env[62405]: DEBUG oslo_vmware.api [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947460, 'name': RemoveSnapshot_Task, 'duration_secs': 0.58721} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.694523] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1757.694730] env[62405]: INFO nova.compute.manager [None req-a8be5974-5301-4735-be46-0a4af2f53cdb tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Took 16.63 seconds to snapshot the instance on the hypervisor. 
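The repeated "Waiting for the task ... / progress is N% / completed successfully ... duration_secs" entries above come from oslo.vmware's task-polling loop. A minimal illustrative sketch of that pattern follows; it is not taken from the nova driver code, the connection values and managed-object reference are placeholders, and the calls shown (VMwareAPISession, invoke_api, wait_for_task) are oslo.vmware's public session API as I understand it.

```python
# Illustrative sketch only: the polling pattern behind the "_poll_task" entries.
# Host, credentials and the moref below are placeholders, not values from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    host='vcenter.example.org',     # placeholder vCenter endpoint
    server_username='nova',         # placeholder credentials
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,         # seconds between the DEBUG progress polls
)

# Long-running vSphere operations return a Task managed object.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() re-reads the Task info until it reaches 'success' (or raises
# on 'error'); each poll is what shows up as "progress is N%" in the log.
task_info = session.wait_for_task(task)
print(task_info.state)
```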
[ 1757.799864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.799864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquired lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.799864] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.895279] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947462, 'name': ReconfigVM_Task, 'duration_secs': 0.282162} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.895279] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 46240f5b-c6ab-481b-b20c-80cc727a79f4/46240f5b-c6ab-481b-b20c-80cc727a79f4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1757.896193] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6a2e0c9-faaa-4427-af19-3a0adfae9947 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.903975] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1757.903975] env[62405]: value = "task-1947463" [ 1757.903975] env[62405]: _type = "Task" [ 1757.903975] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.913211] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947463, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.188098] env[62405]: DEBUG nova.scheduler.client.report [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1758.344985] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.416089] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947463, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.559232] env[62405]: DEBUG nova.network.neutron [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Updating instance_info_cache with network_info: [{"id": "521b75bc-2756-4546-b0b5-969b4ac5d538", "address": "fa:16:3e:5c:78:c3", "network": {"id": "0b1f6c51-3b17-4fba-9b3b-b523f0b72cf8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-932361809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eedd384ae594483a29ed0070c3be0f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f1e0e39-0c84-4fcd-9113-cc528c3eb185", "external-id": "nsx-vlan-transportzone-907", "segmentation_id": 907, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521b75bc-27", "ovs_interfaceid": "521b75bc-2756-4546-b0b5-969b4ac5d538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.697157] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.622s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.702023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 48.905s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.917629] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947463, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.061710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Releasing lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.062175] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Instance network_info: |[{"id": "521b75bc-2756-4546-b0b5-969b4ac5d538", "address": "fa:16:3e:5c:78:c3", "network": {"id": "0b1f6c51-3b17-4fba-9b3b-b523f0b72cf8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-932361809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eedd384ae594483a29ed0070c3be0f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f1e0e39-0c84-4fcd-9113-cc528c3eb185", "external-id": "nsx-vlan-transportzone-907", "segmentation_id": 907, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521b75bc-27", "ovs_interfaceid": "521b75bc-2756-4546-b0b5-969b4ac5d538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1759.062765] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:78:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f1e0e39-0c84-4fcd-9113-cc528c3eb185', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '521b75bc-2756-4546-b0b5-969b4ac5d538', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.074821] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 
tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Creating folder: Project (4eedd384ae594483a29ed0070c3be0f5). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.075237] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-393b69a7-2100-4da5-bbf3-44e1a39b1ef7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.087893] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Created folder: Project (4eedd384ae594483a29ed0070c3be0f5) in parent group-v401284. [ 1759.088170] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Creating folder: Instances. Parent ref: group-v401476. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.088488] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61d6eb96-2c13-4038-824f-6d224445c4ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.100985] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Created folder: Instances in parent group-v401476. [ 1759.101361] env[62405]: DEBUG oslo.service.loopingcall [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.101624] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.101893] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18928f07-63f2-40b1-bcc8-a92b2b5cb766 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.128730] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.128730] env[62405]: value = "task-1947466" [ 1759.128730] env[62405]: _type = "Task" [ 1759.128730] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.138096] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947466, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.223063] env[62405]: DEBUG oslo_concurrency.lockutils [None req-891c1657-6ad6-4516-8dc8-410e9eead4b6 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 71.972s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.224044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 51.361s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.224390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.224736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.228871] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.230766] env[62405]: INFO nova.compute.manager [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Terminating instance [ 1759.359648] env[62405]: DEBUG nova.compute.manager [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Received event network-changed-521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1759.359648] env[62405]: DEBUG nova.compute.manager [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Refreshing instance network info cache due to event network-changed-521b75bc-2756-4546-b0b5-969b4ac5d538. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1759.359796] env[62405]: DEBUG oslo_concurrency.lockutils [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] Acquiring lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.359880] env[62405]: DEBUG oslo_concurrency.lockutils [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] Acquired lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.360425] env[62405]: DEBUG nova.network.neutron [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Refreshing network info cache for port 521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.419934] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947463, 'name': Rename_Task, 'duration_secs': 1.158921} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.420240] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1759.420511] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9d75c17-fd5e-4a35-8182-af5f33000d7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.429635] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1759.429635] env[62405]: value = "task-1947467" [ 1759.429635] env[62405]: _type = "Task" [ 1759.429635] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.442717] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.639824] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947466, 'name': CreateVM_Task, 'duration_secs': 0.465554} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.639992] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1759.640732] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1759.640942] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.641309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1759.641594] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f76640ff-d475-49e0-b0e5-98586380a52a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.647082] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1759.647082] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dce23-8721-1b6a-ce65-ea9bb314d082" [ 1759.647082] env[62405]: _type = "Task" [ 1759.647082] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.656334] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dce23-8721-1b6a-ce65-ea9bb314d082, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.724421] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Applying migration context for instance 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 as it has an incoming, in-progress migration 2e9f9f5c-75f9-4fbb-a793-3dac9f3417c4. 
Migration status is reverting {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1759.725702] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating resource usage from migration 2e9f9f5c-75f9-4fbb-a793-3dac9f3417c4 [ 1759.726105] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating resource usage from migration b4c9b590-842e-4bbd-bf8c-7c1854c857a2 [ 1759.726268] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating resource usage from migration f4e1eabb-c8ee-4e3c-b80a-8f1b540ce872 [ 1759.737681] env[62405]: DEBUG nova.compute.manager [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1759.737882] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1759.738256] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-386196f2-cb57-4baf-8bca-5082d57ab299 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.748564] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72743a11-0855-4e79-bac2-916ced7ef85a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.759837] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 777ddb84-25b9-4da6-be6b-a2289dbf510a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760011] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 058682a1-5240-4414-9203-c612ecd12999 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760181] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
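The resource-tracker entries above and below iterate over every allocation Placement holds against this compute node and pick one of a few outcomes. The following is a rough sketch of that branching reconstructed purely from the log messages themselves; it is not the actual nova.compute.resource_tracker code, and the helper, its arguments, and the uuid sets are hypothetical.

```python
# Hypothetical helper paraphrasing the decisions the log messages describe.
def heal_allocation(uuid, allocations, tracked, active_migrations, scheduled, log):
    if uuid in tracked:
        log.debug("Instance %s actively managed on this compute host and has "
                  "allocations in placement: %s.", uuid, allocations)
    elif uuid in active_migrations:
        log.debug("Migration %s is active on this compute host and has "
                  "allocations in placement: %s.", uuid, allocations)
    elif uuid in scheduled:
        # Scheduled here but not yet started: leave the allocation untouched.
        log.debug("Instance %s has been scheduled to this compute host; "
                  "skipping heal of allocation: %s.", uuid, allocations)
    else:
        # Unknown owner: warn and do nothing rather than guess.
        log.warning("Instance %s is not being actively managed by this compute "
                    "host but has allocations referencing this compute host: %s. "
                    "Skipping heal of allocation because we do not know what "
                    "to do.", uuid, allocations)
```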
[ 1759.760318] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760445] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4d59d9fd-23df-4933-97ed-32602e51e9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760551] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760666] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration 2e9f9f5c-75f9-4fbb-a793-3dac9f3417c4 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1759.760792] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.760926] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance aae3abca-951a-4149-9ccb-d70bea218aea is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1759.761066] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 2c623c00-92f2-4cc4-8503-963c3308d708 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.761186] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4c8c0d2f-d8d3-4422-8a5c-8999636b22be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.761381] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9b495caf-4394-40c0-b68f-d02c7d759a6a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1759.761460] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance dbb5dda5-5420-4d7b-8b32-152d51cb2fb9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1759.761518] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 79548471-56f8-410c-a664-d2242541cd2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.761631] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance d186b2f4-3fd1-44be-b8a4-080972aff3a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.761733] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46240f5b-c6ab-481b-b20c-80cc727a79f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.761842] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9b21fa71-8a0e-446a-9492-59e2b068237c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1759.788630] env[62405]: WARNING nova.virt.vmwareapi.vmops [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6c6a3974-c87e-47ed-a025-d6221a8decd7 could not be found. 
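The WARNING just above ("Instance does not exist on backend: nova.exception.InstanceNotFound") followed by "Instance destroyed" shows the destroy path tolerating a VM that is already gone from the hypervisor so that network deallocation can still proceed. A minimal sketch of that pattern, with hypothetical helper callables; only the exception type is taken from the log.

```python
import logging

from nova import exception

LOG = logging.getLogger(__name__)


def destroy_instance(lookup_vm_ref, destroy_vm, instance_uuid):
    """Destroy a VM, treating an already-missing backend VM as success.

    lookup_vm_ref and destroy_vm are stand-ins for the driver's real helpers.
    """
    try:
        vm_ref = lookup_vm_ref(instance_uuid)
        destroy_vm(vm_ref)
    except exception.InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    # Either way the compute manager can continue with network deallocation.
    LOG.debug("Instance %s destroyed", instance_uuid)
```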
[ 1759.788869] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1759.789573] env[62405]: INFO nova.compute.manager [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1759.789869] env[62405]: DEBUG oslo.service.loopingcall [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.790192] env[62405]: DEBUG nova.compute.manager [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1759.790323] env[62405]: DEBUG nova.network.neutron [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1759.944155] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947467, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.114132] env[62405]: DEBUG nova.network.neutron [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Updated VIF entry in instance network info cache for port 521b75bc-2756-4546-b0b5-969b4ac5d538. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.114132] env[62405]: DEBUG nova.network.neutron [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Updating instance_info_cache with network_info: [{"id": "521b75bc-2756-4546-b0b5-969b4ac5d538", "address": "fa:16:3e:5c:78:c3", "network": {"id": "0b1f6c51-3b17-4fba-9b3b-b523f0b72cf8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-932361809-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4eedd384ae594483a29ed0070c3be0f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f1e0e39-0c84-4fcd-9113-cc528c3eb185", "external-id": "nsx-vlan-transportzone-907", "segmentation_id": 907, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap521b75bc-27", "ovs_interfaceid": "521b75bc-2756-4546-b0b5-969b4ac5d538", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.158482] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529dce23-8721-1b6a-ce65-ea9bb314d082, 'name': SearchDatastore_Task, 'duration_secs': 0.021296} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.158924] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.159128] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.159399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.159606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.159791] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.160076] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b27432a-701e-43cb-9d88-1fd05aa2cbf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.187896] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.187896] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1760.187896] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f1a5b8b-db31-46ac-a194-ca4e3eca4b96 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.193312] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1760.193312] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523da0db-5337-7547-7107-edfaa92ea764" [ 1760.193312] env[62405]: _type = "Task" [ 1760.193312] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.202750] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523da0db-5337-7547-7107-edfaa92ea764, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.265203] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 742c8d94-48d1-4408-91dc-98f25661aa8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1760.440813] env[62405]: DEBUG oslo_vmware.api [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947467, 'name': PowerOnVM_Task, 'duration_secs': 0.528962} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.441056] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1760.441320] env[62405]: INFO nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Took 9.13 seconds to spawn the instance on the hypervisor. 
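The "Acquiring lock ... / Acquired lock ... / Releasing lock" triplets around the devstack-image-cache_base entries are produced by oslo.concurrency's lockutils context manager, which serializes work on a single cached image. A minimal usage sketch follows; the helper function and callback are invented, only the lock name and the lockutils API come from the log and the library, and the real caller evidently also takes an external (cross-process) semaphore, which is omitted here for brevity.

```python
from oslo_concurrency import lockutils

# Lock name copied from the log entries above; it guards one image-cache entry.
CACHE_ENTRY_LOCK = ('[datastore1] devstack-image-cache_base/'
                    'e6bba7a8-c2de-41dc-871a-3859bba5f4f9')


def populate_cache_entry(copy_image_cb):
    """Invented helper: run copy_image_cb while holding the image-cache lock.

    lockutils.lock() logs the Acquiring/Acquired/Releasing DEBUG lines seen above.
    """
    with lockutils.lock(CACHE_ENTRY_LOCK):
        copy_image_cb()
```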
[ 1760.441526] env[62405]: DEBUG nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1760.442292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc839a-1a75-4b2a-94c5-93fc13202c38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.595037] env[62405]: DEBUG nova.network.neutron [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.616879] env[62405]: DEBUG oslo_concurrency.lockutils [req-a284ac42-48b6-48e8-880d-87e3cd0f6251 req-27777290-ba67-4e0a-a7ca-b96afa6930d4 service nova] Releasing lock "refresh_cache-9b21fa71-8a0e-446a-9492-59e2b068237c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.704163] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523da0db-5337-7547-7107-edfaa92ea764, 'name': SearchDatastore_Task, 'duration_secs': 0.009418} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.704939] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aff5d32-734f-4b9d-880f-3bd3e2260971 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.710204] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1760.710204] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e1c268-b0cf-fb46-6280-6e683e0db8cc" [ 1760.710204] env[62405]: _type = "Task" [ 1760.710204] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.717858] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e1c268-b0cf-fb46-6280-6e683e0db8cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.768441] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f410acd2-f786-43bd-ad60-0a6248dedb1c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1760.961663] env[62405]: INFO nova.compute.manager [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Took 67.76 seconds to build instance. [ 1761.098100] env[62405]: INFO nova.compute.manager [-] [instance: 6c6a3974-c87e-47ed-a025-d6221a8decd7] Took 1.31 seconds to deallocate network for instance. [ 1761.221636] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e1c268-b0cf-fb46-6280-6e683e0db8cc, 'name': SearchDatastore_Task, 'duration_secs': 0.009489} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.221920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.222209] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b21fa71-8a0e-446a-9492-59e2b068237c/9b21fa71-8a0e-446a-9492-59e2b068237c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.222472] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3294b15e-7347-489f-bedf-a9d143484f5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.229418] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1761.229418] env[62405]: value = "task-1947468" [ 1761.229418] env[62405]: _type = "Task" [ 1761.229418] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.237621] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947468, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.270990] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 7256b956-e41a-40ec-a687-a129a8bafcb6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1761.463667] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d0b72d29-c466-4820-b5c2-37d88e2b638a tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.275s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.565692] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Acquiring lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.566545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.566545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Acquiring lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.566545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.566730] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.569056] env[62405]: INFO nova.compute.manager [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Terminating instance [ 1761.741984] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947468, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.773946] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1761.970020] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1762.072492] env[62405]: DEBUG nova.compute.manager [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1762.072729] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.073610] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8f7c82-c1f6-452f-8075-16e672bedf98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.081769] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.082012] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5be17d44-a439-4406-9272-ec950b639cf0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.088818] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Waiting for the task: (returnval){ [ 1762.088818] env[62405]: value = "task-1947469" [ 1762.088818] env[62405]: _type = "Task" [ 1762.088818] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.097191] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Task: {'id': task-1947469, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.127224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8c58fd55-3089-4e5b-8553-1e3ba26179e5 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "6c6a3974-c87e-47ed-a025-d6221a8decd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.903s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.241309] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947468, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.277064] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 48554024-9b6f-44be-b21e-615b25cd790c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1762.485775] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.600692] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Task: {'id': task-1947469, 'name': PowerOffVM_Task, 'duration_secs': 0.209206} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.600692] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1762.600692] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1762.600692] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85b9194e-88f8-41ed-b681-e22c08a0f930 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.687916] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1762.688171] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1762.688357] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Deleting the datastore file [datastore1] 46240f5b-c6ab-481b-b20c-80cc727a79f4 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1762.688636] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-216b9afe-9409-4e73-8334-749b93faefee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.696309] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Waiting for the task: (returnval){ [ 1762.696309] env[62405]: value = "task-1947471" [ 1762.696309] env[62405]: _type = "Task" [ 1762.696309] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.704782] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Task: {'id': task-1947471, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.741425] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947468, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.470425} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.741700] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9b21fa71-8a0e-446a-9492-59e2b068237c/9b21fa71-8a0e-446a-9492-59e2b068237c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1762.742030] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1762.742183] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac24e422-1ad7-42eb-9303-f9589416bd37 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.751045] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1762.751045] env[62405]: value = "task-1947472" [ 1762.751045] env[62405]: _type = "Task" [ 1762.751045] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.759764] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947472, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.764758] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "af174cbf-3555-42b0-bacd-033f9ff46f08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.765048] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.780672] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 153adb6e-5381-4e91-881e-8e566a16905a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1763.207021] env[62405]: DEBUG oslo_vmware.api [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Task: {'id': task-1947471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235208} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.207255] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.207445] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1763.207625] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1763.207801] env[62405]: INFO nova.compute.manager [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1763.208050] env[62405]: DEBUG oslo.service.loopingcall [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1763.208268] env[62405]: DEBUG nova.compute.manager [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1763.208429] env[62405]: DEBUG nova.network.neutron [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1763.260813] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.34483} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.261737] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1763.261849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ea4c95-650c-47fe-bcc0-daadf4a6c028 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.285785] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 9b21fa71-8a0e-446a-9492-59e2b068237c/9b21fa71-8a0e-446a-9492-59e2b068237c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1763.286921] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9d97bf1d-6830-48b1-831b-bf2b52188f32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1763.288091] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ae23688-ee6a-40cd-a9e3-148a8a7b1d99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.304587] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ff8731d6-3c55-4ddc-aeb1-308d72313881 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1763.304770] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration b4c9b590-842e-4bbd-bf8c-7c1854c857a2 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1763.304863] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a1a84837-deef-4ffc-8a47-4891bfc2c87a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1763.304988] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration f4e1eabb-c8ee-4e3c-b80a-8f1b540ce872 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1763.305178] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 15218373-ffa5-49ce-b604-423b7fc5fb35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1763.313343] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1763.313343] env[62405]: value = "task-1947473" [ 1763.313343] env[62405]: _type = "Task" [ 1763.313343] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.323317] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947473, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.460893] env[62405]: DEBUG nova.compute.manager [req-da7bdedc-32d4-4f27-9117-0233f47ca12b req-9c7cd1d4-0f46-476f-a660-b40850ddbf84 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Received event network-vif-deleted-637b6253-d82e-4e82-afe0-7ee5f1d4351f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1763.461051] env[62405]: INFO nova.compute.manager [req-da7bdedc-32d4-4f27-9117-0233f47ca12b req-9c7cd1d4-0f46-476f-a660-b40850ddbf84 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Neutron deleted interface 637b6253-d82e-4e82-afe0-7ee5f1d4351f; detaching it from the instance and deleting it from the info cache [ 1763.461225] env[62405]: DEBUG nova.network.neutron [req-da7bdedc-32d4-4f27-9117-0233f47ca12b req-9c7cd1d4-0f46-476f-a660-b40850ddbf84 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.808034] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f16e3d13-6db6-4f61-b0e4-661856a9166b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1763.824074] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947473, 'name': ReconfigVM_Task, 'duration_secs': 0.275032} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.824074] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 9b21fa71-8a0e-446a-9492-59e2b068237c/9b21fa71-8a0e-446a-9492-59e2b068237c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1763.824468] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4534a268-4d53-4d28-88d7-183f5fb6a092 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.831223] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1763.831223] env[62405]: value = "task-1947474" [ 1763.831223] env[62405]: _type = "Task" [ 1763.831223] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.839500] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947474, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.939068] env[62405]: DEBUG nova.network.neutron [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.964446] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db61d65d-7273-49fd-931e-340f589bb3fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.976182] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac972bc-d965-4845-b2a9-b2b51d135af4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.019890] env[62405]: DEBUG nova.compute.manager [req-da7bdedc-32d4-4f27-9117-0233f47ca12b req-9c7cd1d4-0f46-476f-a660-b40850ddbf84 service nova] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Detach interface failed, port_id=637b6253-d82e-4e82-afe0-7ee5f1d4351f, reason: Instance 46240f5b-c6ab-481b-b20c-80cc727a79f4 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1764.311131] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 2ab5f28c-1f71-4bea-8733-523e5570f5c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1764.311458] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1764.311613] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3520MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1764.346096] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947474, 'name': Rename_Task, 'duration_secs': 0.142216} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.346395] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1764.346639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f4751c5-3f53-41bb-b204-2d0580ff33fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.358771] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1764.358771] env[62405]: value = "task-1947475" [ 1764.358771] env[62405]: _type = "Task" [ 1764.358771] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.368645] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947475, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.443658] env[62405]: INFO nova.compute.manager [-] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Took 1.24 seconds to deallocate network for instance. [ 1764.727645] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17a4e5b-edea-48e2-815b-11e28854a124 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.735979] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae63575-d1e1-4de9-8eb1-947d52022c61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.768271] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa70638-9553-497a-a56b-2c1527742588 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.776496] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc59027-cad3-4990-b306-780f3d15a1d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.790650] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.868622] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947475, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.952409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.295411] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1765.369503] env[62405]: DEBUG oslo_vmware.api [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947475, 'name': PowerOnVM_Task, 'duration_secs': 0.654565} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.369633] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1765.369730] env[62405]: INFO nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 1765.369910] env[62405]: DEBUG nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1765.370680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0173d87-3068-4114-9ad6-530ee61c0881 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.800667] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1765.802020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.101s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.802020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 52.831s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.802842] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.803440] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 1765.893108] env[62405]: INFO nova.compute.manager [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Took 67.71 seconds to build instance. 
[ 1766.305153] env[62405]: DEBUG nova.objects.instance [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lazy-loading 'migration_context' on Instance uuid 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1766.316983] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 44 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 1766.317297] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b4693268-4d12-4c96-a8f9-7b1bb9705c89] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1766.395521] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f5ba8d7-65c2-47a4-a0d1-665f095bf685 tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.752s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.412902] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "9b21fa71-8a0e-446a-9492-59e2b068237c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.413203] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.413776] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.413976] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.414186] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.419276] env[62405]: INFO nova.compute.manager [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Terminating instance [ 1766.667124] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08107d03-3e48-4471-b607-fb3ca44b0ae9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.675290] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20fb3b9-d4ce-4137-a976-be544e27e7f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.707088] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606d42f4-e48a-4241-a079-f939874600e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.715015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3181d05a-050a-4c93-a437-38cfe2ae3211 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.728314] env[62405]: DEBUG nova.compute.provider_tree [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1766.823786] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f0ca0d3d-cb2b-467b-a466-c270794055d7] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1766.900601] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1766.923351] env[62405]: DEBUG nova.compute.manager [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1766.923580] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1766.924469] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b122d7a4-6b51-449f-a90d-139aa66ee098 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.933093] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1766.933333] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-288bcbbb-13be-4a4c-9ce5-be099a95f18c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.941205] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1766.941205] env[62405]: value = "task-1947476" [ 1766.941205] env[62405]: _type = "Task" [ 1766.941205] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.951349] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947476, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.234042] env[62405]: DEBUG nova.scheduler.client.report [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1767.326651] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 377365a4-7538-4bab-a181-1940e6fb4066] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1767.418478] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.451357] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947476, 'name': PowerOffVM_Task, 'duration_secs': 0.182945} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.451635] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1767.451802] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1767.452055] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a0bfb3c-2c47-4cde-9bb3-308a301257eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.593151] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1767.593361] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1767.593536] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Deleting the datastore file [datastore1] 9b21fa71-8a0e-446a-9492-59e2b068237c {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.593800] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33b2270f-efd6-44e9-a918-d5ba60285cb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.600399] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for the task: (returnval){ [ 1767.600399] env[62405]: value = "task-1947478" [ 1767.600399] env[62405]: _type = "Task" [ 1767.600399] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.608665] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947478, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.830349] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6213702e-8e39-4342-b62f-2c9495017bf9] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1768.111845] env[62405]: DEBUG oslo_vmware.api [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Task: {'id': task-1947478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128951} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.112160] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1768.112533] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1768.112762] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.112942] env[62405]: INFO nova.compute.manager [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1768.113311] env[62405]: DEBUG oslo.service.loopingcall [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.113512] env[62405]: DEBUG nova.compute.manager [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1768.113606] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1768.243593] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.442s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.252058] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 54.790s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.253851] env[62405]: INFO nova.compute.claims [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1768.334906] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a6a0e918-425d-44de-a22b-8779e9108533] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1768.460488] env[62405]: DEBUG nova.compute.manager [req-00699bbc-c1f4-4da1-9af5-307ce6dee483 req-54575f53-a673-493d-9ce1-d4016f2998e2 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Received event network-vif-deleted-521b75bc-2756-4546-b0b5-969b4ac5d538 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1768.460488] env[62405]: INFO nova.compute.manager [req-00699bbc-c1f4-4da1-9af5-307ce6dee483 req-54575f53-a673-493d-9ce1-d4016f2998e2 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Neutron deleted interface 521b75bc-2756-4546-b0b5-969b4ac5d538; detaching it from the instance and deleting it from the info cache [ 1768.460718] env[62405]: DEBUG nova.network.neutron [req-00699bbc-c1f4-4da1-9af5-307ce6dee483 req-54575f53-a673-493d-9ce1-d4016f2998e2 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.837017] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a9f83357-4898-44ff-a6d8-ea6621453de9] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1768.882159] env[62405]: DEBUG nova.network.neutron [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Updating instance_info_cache with network_info: [] {{(pid=62405) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.962702] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8cc3bcb-5749-4dcd-b821-ed1fe5a36304 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.974184] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815d23a3-2554-4a27-87f2-a0a84990512c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.007558] env[62405]: DEBUG nova.compute.manager [req-00699bbc-c1f4-4da1-9af5-307ce6dee483 req-54575f53-a673-493d-9ce1-d4016f2998e2 service nova] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Detach interface failed, port_id=521b75bc-2756-4546-b0b5-969b4ac5d538, reason: Instance 9b21fa71-8a0e-446a-9492-59e2b068237c could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1769.340710] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 59957a81-5297-43d3-a673-024a53a19116] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1769.384959] env[62405]: INFO nova.compute.manager [-] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Took 1.27 seconds to deallocate network for instance. [ 1769.642148] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad56cd1-4a1d-4584-8b0b-b44d62e56cb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.650379] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecddad4-6ffc-4e10-a0f9-014d806fbfed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.681314] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00aa6941-0b2f-44d2-882d-1c740f1c5915 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.689470] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8118215e-bbc4-462f-888a-2c82e2947b27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.702785] env[62405]: DEBUG nova.compute.provider_tree [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.791201] env[62405]: INFO nova.compute.manager [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Swapping old allocation on dict_keys(['7d5eded7-a501-4fa6-b1d3-60e273d555d7']) held by migration 2e9f9f5c-75f9-4fbb-a793-3dac9f3417c4 for instance [ 1769.813384] env[62405]: DEBUG nova.scheduler.client.report [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 
tempest-MigrationsAdminTest-494957219-project-member] Overwriting current allocation {'allocations': {'7d5eded7-a501-4fa6-b1d3-60e273d555d7': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 106}}, 'project_id': 'e3cd6b7f1ce346e98fe8bff2423f34ab', 'user_id': '1bea5fa632f74543a680f69edf3c05ff', 'consumer_generation': 1} on consumer 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 {{(pid=62405) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1769.843583] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: c392d6f3-b638-4857-826d-760c38b7d291] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1769.891192] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.983652] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.983852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquired lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.984070] env[62405]: DEBUG nova.network.neutron [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1770.206419] env[62405]: DEBUG nova.scheduler.client.report [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1770.348243] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 1cbdf601-4b0e-47ce-96d3-ffa0b77bd6ac] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1770.688075] env[62405]: DEBUG nova.network.neutron [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 
3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [{"id": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "address": "fa:16:3e:f9:2e:fa", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fbae16c-e9", "ovs_interfaceid": "7fbae16c-e943-4752-8a7e-92bdea130e1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.710924] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.711451] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1770.714017] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.300s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.714217] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.716922] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.887s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.716922] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.719818] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.396s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.719818] env[62405]: INFO nova.compute.claims [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1770.745414] env[62405]: INFO nova.scheduler.client.report [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Deleted allocations for instance 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b [ 1770.747242] env[62405]: INFO nova.scheduler.client.report [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleted allocations for instance aae3abca-951a-4149-9ccb-d70bea218aea [ 1770.850236] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0f2d81cb-da2a-4664-b9a2-b8c3c38ddc73] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1771.190566] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Releasing lock "refresh_cache-3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.191065] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1771.191375] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20871e12-5edf-49b8-83c4-1416d39056ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.200792] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1771.200792] env[62405]: value = "task-1947479" [ 1771.200792] env[62405]: _type = "Task" [ 1771.200792] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.209656] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.224380] env[62405]: DEBUG nova.compute.utils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1771.227791] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1771.227971] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1771.257404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d276892-99fc-4e22-a015-4f2656f78414 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "aae3abca-951a-4149-9ccb-d70bea218aea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.729s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.259165] env[62405]: DEBUG oslo_concurrency.lockutils [None req-922bf869-d298-4d92-8aca-7264394af468 tempest-ServersAdminNegativeTestJSON-1224553160 tempest-ServersAdminNegativeTestJSON-1224553160-project-member] Lock "78b4c6ea-6f5b-40d8-8c4a-10332f176e0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.062s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.287222] env[62405]: DEBUG nova.policy [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f670ba48efd4241a68d17077ce4f6aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '650fd0d6b10b4b88aac64a5b51c10ee7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1771.354108] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8ea521a4-7d4c-4f0f-9bf6-44b8559eefd6] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1771.607721] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Successfully created port: 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1771.712219] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947479, 'name': PowerOffVM_Task, 'duration_secs': 0.174286} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.712603] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1771.713372] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:23:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='dc0b8b8d-2143-43d6-88ba-cc2419f1681a',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1321132944',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1771.713820] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1771.713820] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1771.714041] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1771.714125] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1771.714283] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1771.714496] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1771.714659] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 
tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1771.714828] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1771.714989] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1771.715178] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1771.720398] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cab438a1-68a4-4dd7-93d5-1bc1578cc7c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.731260] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1771.743608] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1771.743608] env[62405]: value = "task-1947481" [ 1771.743608] env[62405]: _type = "Task" [ 1771.743608] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.753793] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947481, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.857804] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d5686d7c-a73f-4e02-8726-eab8221a0eae] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1772.134377] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4096f86d-e7db-43a0-a1e9-5287b0b4c8d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.142422] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577e179d-72ca-4cba-8c6f-46ee95ccb57e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.172890] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed341d7-2d32-4b02-9274-2126795fff46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.188524] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9debf72-3fce-421f-97b6-0de9110755e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.202285] env[62405]: DEBUG nova.compute.provider_tree [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.254121] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947481, 'name': ReconfigVM_Task, 'duration_secs': 0.138193} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.254977] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85a9b51-7f18-4a0a-9b57-8944eb572bcd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.274127] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:23:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='dc0b8b8d-2143-43d6-88ba-cc2419f1681a',id=27,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1321132944',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1772.274447] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.274692] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1772.274831] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.275011] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1772.275201] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1772.275421] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1772.275583] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1772.275752] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1772.275918] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1772.276143] env[62405]: DEBUG nova.virt.hardware [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1772.277607] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75c07e64-0b22-4dd1-a6a8-e50b5f7cab20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.284371] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1772.284371] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52883d43-c64e-db21-a71c-ef48f7e656ae" [ 1772.284371] env[62405]: _type = "Task" [ 1772.284371] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.292994] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52883d43-c64e-db21-a71c-ef48f7e656ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.369985] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3c05b8fc-32b8-42e4-b3c2-95d1cdbc3dc8] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1772.705318] env[62405]: DEBUG nova.scheduler.client.report [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1772.742406] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1772.771226] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1772.771523] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.771686] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1772.771871] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Flavor pref 0:0:0 {{(pid=62405) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.772034] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1772.772196] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1772.772403] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1772.772560] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1772.772725] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1772.772886] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1772.773072] env[62405]: DEBUG nova.virt.hardware [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1772.773953] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ec5581-7389-44e6-84e2-87e662ded47e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.781838] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeb8619-5c62-4cba-9154-59bca1a65f56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.795679] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52883d43-c64e-db21-a71c-ef48f7e656ae, 'name': SearchDatastore_Task, 'duration_secs': 0.008141} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.807973] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfiguring VM instance instance-0000002d to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1772.808605] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1224e63d-90bb-4e32-961c-e08f26429ae4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.828298] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1772.828298] env[62405]: value = "task-1947482" [ 1772.828298] env[62405]: _type = "Task" [ 1772.828298] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.837293] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.872934] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 23748dfd-7c60-41db-8acb-7b49cf1c27db] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1773.212459] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.212673] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1773.215702] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.821s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.217290] env[62405]: INFO nova.compute.claims [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1773.338905] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947482, 'name': ReconfigVM_Task, 'duration_secs': 0.202725} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.339214] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfigured VM instance instance-0000002d to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1773.339971] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655642a4-6f59-4e8a-83a9-d8a6544f6f27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.366375] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1773.366780] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a37cdddb-fad9-42b7-a44a-f635ae28ac6e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.380276] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a73579d1-8647-49fe-98ce-0baffd1a558f] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1773.388482] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1773.388482] env[62405]: value = "task-1947483" [ 1773.388482] env[62405]: _type = "Task" [ 1773.388482] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.397278] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.727851] env[62405]: DEBUG nova.compute.utils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.728908] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1773.729088] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1773.845842] env[62405]: DEBUG nova.policy [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '029232c5d1164bd29d39df66a9054f4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f89d800e697843108559c779e16fe3c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1773.885575] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0feaeb5d-9f4a-4166-99b1-f213bc4fa458] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1773.899111] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947483, 'name': ReconfigVM_Task, 'duration_secs': 0.27642} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.904033] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3/3c9487ff-2092-4cde-82d5-b38e5bc5c6e3.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1773.904195] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dba602b-ea23-4bd6-8710-43735f97cb33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.927518] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56acf095-7fc5-4b9f-a295-1009fa348cd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.953015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa637e0f-c37a-423d-a656-d516b40c170f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.979540] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26957964-cd84-42f1-8f01-30955431c78c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.988239] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1773.988239] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e740181f-5b3d-4bec-af88-f98e0a03eb88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.997651] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1773.997651] env[62405]: value = "task-1947484" [ 1773.997651] env[62405]: _type = "Task" [ 1773.997651] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.007498] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947484, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.235401] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1774.257704] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Successfully created port: 98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.387077] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9e73e2ab-1eac-4aca-905f-a8391d3f5a9b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1774.508896] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947484, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.672273] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d50b55-fac5-43b4-90ef-521df6c54b61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.681026] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0777f88e-0e90-4134-9130-39aaa77c40ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.716948] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ef43ca-608d-4f8f-8936-3f1f8ff90ce8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.725871] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3896f8bc-79aa-4c9f-b5ba-736e9e1f3ec9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.747185] env[62405]: DEBUG nova.compute.provider_tree [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.890912] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 900b95b5-fe5a-46c1-909a-f81b82ced0ef] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1775.009291] env[62405]: DEBUG oslo_vmware.api [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947484, 'name': PowerOnVM_Task, 'duration_secs': 0.523383} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.009558] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1775.248397] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1775.251330] env[62405]: DEBUG nova.scheduler.client.report [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1775.283140] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1775.283140] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1775.283140] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1775.283140] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 
tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1775.287098] env[62405]: DEBUG nova.virt.hardware [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1775.287606] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019c71eb-2e79-42ac-95f6-f9a026dfc352 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.298497] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6472b55c-8544-454f-af32-d2832827dd36 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.302566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.302846] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.303101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.303294] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.303569] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.305846] env[62405]: INFO nova.compute.manager [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Terminating instance [ 1775.366444] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.366444] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.396048] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 14dab775-19b4-4d0d-a7ee-67705f7e45ca] Instance has had 0 of 5 cleanup attempts {{(pid=62405) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1775.757605] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.758138] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1775.760748] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.346s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.762117] env[62405]: INFO nova.compute.claims [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1775.819613] env[62405]: DEBUG nova.compute.manager [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1775.819613] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1775.820427] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978b2f7e-f52b-48fe-9bce-5afd56bc51b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.828906] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1775.829159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc2efb60-15bb-45fc-887f-5d9612d7eaee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.835903] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1775.835903] env[62405]: value = "task-1947485" [ 1775.835903] env[62405]: _type = "Task" [ 1775.835903] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.845108] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947485, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.898932] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: fae2dd0d-f83c-45ba-bfe4-8c51b0c885c9] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1776.020663] env[62405]: INFO nova.compute.manager [None req-4ab447f8-448f-431a-bdc9-4891d949a542 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance to original state: 'active' [ 1776.270029] env[62405]: DEBUG nova.compute.utils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1776.273469] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1776.274447] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1776.325897] env[62405]: DEBUG nova.policy [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2d71c15bbd4115857a5dcc06037c25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd28fa9475f2f4a149bf00ccc63e70e3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1776.348451] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947485, 'name': PowerOffVM_Task, 'duration_secs': 0.215393} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.348550] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1776.348702] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1776.348965] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b843bd4-e0f7-4f79-949a-df4c8426a959 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.402760] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: e8ed73c3-fb86-42c3-aae6-b0c8d03149ce] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1776.622859] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Successfully created port: a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1776.774847] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Start building block device 
mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1776.905486] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 65462c7a-372e-4ba6-8f6d-e300080d65d0] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1777.234299] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76fc097-b3c6-44e4-8c40-9cc42e8ec771 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.242922] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc5223b-81fc-41dc-8fab-d121695b045d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.273577] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dc9d4e-4e2d-4d7d-97b2-74d82dec332b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.281802] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8f3bb1-9c4c-4926-b949-af49f232dac0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.302044] env[62405]: DEBUG nova.compute.provider_tree [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1777.356821] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.357052] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.357354] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.357544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.357717] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.360508] env[62405]: INFO nova.compute.manager [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Terminating instance [ 1777.410487] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ca4d11fe-1d0f-468b-a2f4-21c5b84342ab] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1777.639846] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1777.640172] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1777.640294] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleting the datastore file [datastore1] 777ddb84-25b9-4da6-be6b-a2289dbf510a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1777.640563] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a22013e9-3307-4484-bdc5-08ca2cead1c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.648966] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for the task: (returnval){ [ 1777.648966] env[62405]: value = "task-1947487" [ 1777.648966] env[62405]: _type = "Task" [ 1777.648966] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.658559] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.792099] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1777.805843] env[62405]: DEBUG nova.scheduler.client.report [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1777.819832] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1777.820493] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1777.820493] env[62405]: DEBUG nova.virt.hardware [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1777.820984] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856b8963-0cec-4b71-b3ad-1b30202862d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.830435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ce88d5-41ea-48a4-8b62-21feb610a7c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.864944] env[62405]: DEBUG nova.compute.manager [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1777.865115] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1777.865912] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb3914e-6d2d-42f0-830f-ea8627d1036a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.874312] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1777.874557] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d862a2f0-fd3e-4724-b6dd-2041353c4fa4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.881882] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1777.881882] env[62405]: value = "task-1947488" [ 1777.881882] env[62405]: _type = "Task" [ 1777.881882] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.891255] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947488, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.913592] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b21dc1e7-dacd-4154-9bc3-0fa3774695a8] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1778.158805] env[62405]: DEBUG oslo_vmware.api [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Task: {'id': task-1947487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150906} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.159103] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1778.159293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1778.159472] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1778.159646] env[62405]: INFO nova.compute.manager [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Took 2.34 seconds to destroy the instance on the hypervisor. [ 1778.159885] env[62405]: DEBUG oslo.service.loopingcall [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1778.160093] env[62405]: DEBUG nova.compute.manager [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1778.160188] env[62405]: DEBUG nova.network.neutron [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1778.310852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.311420] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1778.314148] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.539s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.315610] env[62405]: INFO nova.compute.claims [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1778.391460] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947488, 'name': PowerOffVM_Task, 'duration_secs': 0.203468} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.391976] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1778.392646] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1778.392646] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e05bd487-55e1-4115-8b8c-f5228719e4b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.416526] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f8c6f99f-499f-4886-aae9-5f08969175f6] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1778.624266] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1778.624507] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1778.624691] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleting the datastore file [datastore1] 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 {{(pid=62405) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1778.624968] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cba257d0-d70b-4217-9b1d-8d7d0896b54c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.637979] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1778.637979] env[62405]: value = "task-1947490" [ 1778.637979] env[62405]: _type = "Task" [ 1778.637979] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.649860] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.682646] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Successfully updated port: a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1778.747326] env[62405]: DEBUG nova.compute.manager [req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Received event network-vif-plugged-a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1778.747514] env[62405]: DEBUG oslo_concurrency.lockutils [req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.747727] env[62405]: DEBUG oslo_concurrency.lockutils [req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.747898] env[62405]: DEBUG oslo_concurrency.lockutils [req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.748076] env[62405]: DEBUG nova.compute.manager [req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] No waiting events found dispatching network-vif-plugged-a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1778.748247] env[62405]: WARNING nova.compute.manager 
[req-7b565d95-f519-4a49-a5f8-8b60bba5b095 req-806c4a0d-ddaa-44e2-aab9-2e2e48707376 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Received unexpected event network-vif-plugged-a6bb60c9-208a-4c73-96e1-13626d7d1dd8 for instance with vm_state building and task_state spawning. [ 1778.822571] env[62405]: DEBUG nova.compute.utils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.825301] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1778.825301] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1778.883692] env[62405]: DEBUG nova.policy [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2d71c15bbd4115857a5dcc06037c25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd28fa9475f2f4a149bf00ccc63e70e3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1778.920368] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: fbedaa93-5968-4b42-b93e-201d2b44b32b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1778.970686] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Successfully updated port: 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1778.997323] env[62405]: DEBUG nova.compute.manager [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Received event network-vif-plugged-98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1778.997323] env[62405]: DEBUG oslo_concurrency.lockutils [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] Acquiring lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.997524] 
env[62405]: DEBUG oslo_concurrency.lockutils [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.997697] env[62405]: DEBUG oslo_concurrency.lockutils [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.997870] env[62405]: DEBUG nova.compute.manager [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] No waiting events found dispatching network-vif-plugged-98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1778.998056] env[62405]: WARNING nova.compute.manager [req-724d3e32-7e32-490b-8f1d-97773740081b req-7cc10298-2e8c-451d-b2ab-af7c0b1913a2 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Received unexpected event network-vif-plugged-98b2004d-b7ad-4c97-bf77-8dbdb1077689 for instance with vm_state building and task_state spawning. [ 1779.116835] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Successfully updated port: 98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1779.149020] env[62405]: DEBUG oslo_vmware.api [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152488} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.149488] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1779.149752] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1779.149947] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1779.150137] env[62405]: INFO nova.compute.manager [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1779.150374] env[62405]: DEBUG oslo.service.loopingcall [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.150560] env[62405]: DEBUG nova.compute.manager [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1779.150651] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1779.186534] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.186534] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.186534] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.213839] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 
tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Successfully created port: dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1779.329061] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1779.425037] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b3647042-89a1-4d15-b85e-49a5c8def1d4] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1779.473145] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.473353] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.473489] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.617198] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.617496] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquired lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.617606] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1779.716790] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 
7256b956-e41a-40ec-a687-a129a8bafcb6] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1779.753393] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d070e777-61ad-4dfb-b007-6bf0d724be9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.761789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e549cfe8-17eb-41a0-bc33-ad021113adb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.797221] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed0ead5-a261-479d-a491-b3e57ba41525 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.804711] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04328ce6-694d-4b77-b1fb-c10e1d8ead6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.817966] env[62405]: DEBUG nova.compute.provider_tree [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1779.819847] env[62405]: DEBUG nova.network.neutron [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.869821] env[62405]: DEBUG nova.network.neutron [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Updating instance_info_cache with network_info: [{"id": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "address": "fa:16:3e:6f:12:15", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bb60c9-20", "ovs_interfaceid": 
"a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.930028] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0eec4a5f-9f9b-4a86-a046-2e2d107adc48] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1780.031206] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1780.076597] env[62405]: DEBUG nova.network.neutron [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.162085] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1780.279676] env[62405]: DEBUG nova.network.neutron [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.323052] env[62405]: INFO nova.compute.manager [-] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Took 2.16 seconds to deallocate network for instance. 
[ 1780.339594] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1780.345701] env[62405]: ERROR nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [req-c0ce0376-de4f-4c00-bd77-9bfecb1b571b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c0ce0376-de4f-4c00-bd77-9bfecb1b571b"}]} [ 1780.363528] env[62405]: DEBUG nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1780.373362] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1780.373500] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1780.373659] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1780.373843] env[62405]: DEBUG nova.virt.hardware [None 
req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1780.373987] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1780.374193] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1780.374406] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1780.374567] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1780.374733] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1780.374894] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1780.375176] env[62405]: DEBUG nova.virt.hardware [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1780.375637] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.375911] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Instance network_info: |[{"id": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "address": "fa:16:3e:6f:12:15", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": 
"tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bb60c9-20", "ovs_interfaceid": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1780.376756] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cacdd7e-b3a5-4c81-a859-a06e59f5004c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.379951] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:12:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6bb60c9-208a-4c73-96e1-13626d7d1dd8', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1780.387237] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Creating folder: Project (d28fa9475f2f4a149bf00ccc63e70e3b). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.388208] env[62405]: DEBUG nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1780.388421] env[62405]: DEBUG nova.compute.provider_tree [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1780.390851] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd7ffe93-5b20-48a8-bef1-8d6c122690ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.398552] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c746b050-ab5c-4746-8a13-807c626d462a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.402864] env[62405]: DEBUG nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1780.405608] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Created folder: Project (d28fa9475f2f4a149bf00ccc63e70e3b) in parent group-v401284. [ 1780.405795] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Creating folder: Instances. Parent ref: group-v401479. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.406663] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a718ccd-faa2-433d-8b7b-63745a0898ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.417163] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Created folder: Instances in parent group-v401479. [ 1780.417417] env[62405]: DEBUG oslo.service.loopingcall [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.417600] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1780.417790] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5e748e3-408d-4199-bd4d-eba92911e4c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.431891] env[62405]: DEBUG nova.network.neutron [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Updating instance_info_cache with network_info: [{"id": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "address": "fa:16:3e:57:1b:6a", "network": {"id": "beab648c-6cb4-4d39-9eb4-97500d8cb0ca", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-513861862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89d800e697843108559c779e16fe3c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b2004d-b7", "ovs_interfaceid": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.433559] env[62405]: DEBUG nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1780.437120] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 792cd2c8-a67d-4b16-93ab-722fcc8b622d] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1780.447323] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1780.447323] env[62405]: value = "task-1947493" [ 1780.447323] env[62405]: _type = "Task" [ 1780.447323] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.455994] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947493, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.579286] env[62405]: INFO nova.compute.manager [-] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Took 1.43 seconds to deallocate network for instance. [ 1780.775675] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Successfully updated port: dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1780.781908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.782259] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Instance network_info: |[{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1780.782679] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 
tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:ec:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4cdbc50f-67a4-4007-ba8c-4b0690bb67c5', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1780.790744] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Creating folder: Project (650fd0d6b10b4b88aac64a5b51c10ee7). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.792474] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-374ae042-0f0e-44de-a473-8b02c2d34f76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.796154] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Received event network-changed-a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1780.796363] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Refreshing instance network info cache due to event network-changed-a6bb60c9-208a-4c73-96e1-13626d7d1dd8. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1780.796579] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquiring lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.796725] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquired lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.796882] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Refreshing network info cache for port a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1780.808463] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Created folder: Project (650fd0d6b10b4b88aac64a5b51c10ee7) in parent group-v401284. [ 1780.808635] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Creating folder: Instances. Parent ref: group-v401482. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.809485] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da6178d8-fa43-4674-804b-85ec23d42d0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.820592] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Created folder: Instances in parent group-v401482. [ 1780.820818] env[62405]: DEBUG oslo.service.loopingcall [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.821224] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1780.821427] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-897873d9-9c91-4bc2-a1fe-3234b4dbcf8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.837870] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.843585] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1780.843585] env[62405]: value = "task-1947496" [ 1780.843585] env[62405]: _type = "Task" [ 1780.843585] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.851763] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947496, 'name': CreateVM_Task} progress is 0%. 
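The "Waiting for the task" / "progress is 0%" pairs above are oslo.vmware's wait_for_task driving CreateVM_Task to completion by polling. A simplified sketch of that polling pattern, with a hypothetical get_task_info() standing in for the property-collector reads oslo.vmware actually performs:

```python
# Simplified loop in the spirit of oslo_vmware.api.wait_for_task, which
# the log shows polling CreateVM_Task until it completes. get_task_info()
# is a hypothetical accessor, not the real vSphere call.
import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info.get("result")          # e.g. the new VM's managed object ref
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # queued/running: report progress and poll again, as _poll_task does
        print(f"Task {task_ref} ({info['name']}) progress is {info.get('progress', 0)}%")
        time.sleep(interval)
```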
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.860015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b182bd0a-49d4-4552-9fd2-51c263418ce3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.866312] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0c74c0-65d4-4c96-9d86-f69cbdfd9092 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.896025] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2faa5fd5-2eda-4559-b4a1-90ad79c02212 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.903230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df8497b-77f9-4026-ac5d-25e68b698322 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.917996] env[62405]: DEBUG nova.compute.provider_tree [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1780.941756] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Releasing lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.942100] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Instance network_info: |[{"id": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "address": "fa:16:3e:57:1b:6a", "network": {"id": "beab648c-6cb4-4d39-9eb4-97500d8cb0ca", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-513861862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89d800e697843108559c779e16fe3c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap98b2004d-b7", "ovs_interfaceid": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1780.942499] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3f9849b8-6aaa-4d32-b140-207d5b54d68f] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1780.944492] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:1b:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd72ef32-a57c-43b0-93df-e8a030987d44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98b2004d-b7ad-4c97-bf77-8dbdb1077689', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1780.951969] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Creating folder: Project (f89d800e697843108559c779e16fe3c3). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.952761] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe24945e-e036-4a05-aa5b-46c90f78040a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.964549] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947493, 'name': CreateVM_Task, 'duration_secs': 0.353462} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.964779] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1780.965576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.965755] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.966194] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1780.967125] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11dc84f3-2d31-4484-8e11-6b4b51f45dc5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.970450] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Created folder: Project (f89d800e697843108559c779e16fe3c3) in parent group-v401284. [ 1780.970666] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Creating folder: Instances. Parent ref: group-v401485. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1780.971261] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34dbe026-8059-4a2a-891c-50f65ae236ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.974374] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1780.974374] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52231c2d-d641-328e-dec7-c5e99e2efecf" [ 1780.974374] env[62405]: _type = "Task" [ 1780.974374] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.982692] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52231c2d-d641-328e-dec7-c5e99e2efecf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.983810] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Created folder: Instances in parent group-v401485. [ 1780.984063] env[62405]: DEBUG oslo.service.loopingcall [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1780.984252] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1780.984450] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1f3c8f6-3be5-4881-885c-39ab98021540 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.003132] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1781.003132] env[62405]: value = "task-1947499" [ 1781.003132] env[62405]: _type = "Task" [ 1781.003132] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.015132] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947499, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.027933] env[62405]: DEBUG nova.compute.manager [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Received event network-changed-98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1781.028162] env[62405]: DEBUG nova.compute.manager [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Refreshing instance network info cache due to event network-changed-98b2004d-b7ad-4c97-bf77-8dbdb1077689. 
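The req-662d280d and req-d627d53e entries show the usual shape of external instance event handling: a network-changed-<port_id> event arrives from Neutron, and the handler re-reads that port's info under the instance's refresh_cache lock before updating the cache. A rough sketch of that flow, using oslo.concurrency's real lockutils.lock() context manager but a hypothetical refresh helper:

```python
# Rough sketch of the pattern behind the Acquiring/Acquired/Releasing
# "refresh_cache-<uuid>" lines above: serialize cache refreshes per
# instance while handling a network-changed event. refresh_port_info()
# is a stand-in for Nova's Neutron cache refresh, not real code.
from oslo_concurrency import lockutils

def handle_network_changed(instance_uuid, port_id, refresh_port_info):
    lock_name = f"refresh_cache-{instance_uuid}"
    with lockutils.lock(lock_name):
        # Only one thread refreshes this instance's network info at a time.
        refresh_port_info(instance_uuid, port_id)
```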
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1781.028398] env[62405]: DEBUG oslo_concurrency.lockutils [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] Acquiring lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.028558] env[62405]: DEBUG oslo_concurrency.lockutils [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] Acquired lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.028722] env[62405]: DEBUG nova.network.neutron [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Refreshing network info cache for port 98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1781.086340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.279239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.279239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.279336] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.352805] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947496, 'name': CreateVM_Task, 'duration_secs': 0.314516} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.352980] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1781.353945] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.452509] env[62405]: DEBUG nova.scheduler.client.report [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1781.452816] env[62405]: DEBUG nova.compute.provider_tree [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 108 to 109 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1781.453062] env[62405]: DEBUG nova.compute.provider_tree [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1781.456445] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b8ff115b-64f1-4584-afa2-478c5e6b726b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1781.485504] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52231c2d-d641-328e-dec7-c5e99e2efecf, 'name': SearchDatastore_Task, 'duration_secs': 0.008629} completed successfully. 
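This is the resolution of the conflict first logged for req-c0ce0376 above: the inventory PUT failed with placement.concurrent_update, the report client refreshed the provider, and the retried update landed, bumping the provider generation from 108 to 109. A minimal sketch of that read-generation-then-PUT retry against the placement API, assuming an already authenticated keystoneauth/requests-style session (illustrative only, not Nova's report client):

```python
# Minimal sketch of recovering from a "placement.concurrent_update" 409:
# re-read the resource provider generation, then retry the inventory PUT.
# `session` is assumed to be an authenticated requests-style session with
# the placement endpoint as its base URL; this is not Nova's report client.
import time

def set_inventories(session, provider_uuid, inventories, max_retries=3):
    url = f"/resource_providers/{provider_uuid}/inventories"
    for attempt in range(max_retries):
        current = session.get(url).json()
        body = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = session.put(url, json=body)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Another writer bumped the generation between our GET and PUT;
        # back off briefly and retry with the fresh generation.
        time.sleep(0.1 * (attempt + 1))
    raise RuntimeError(f"inventory update for {provider_uuid} kept conflicting")
```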
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.486498] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.486498] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1781.486498] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.486498] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.486684] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1781.486865] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.487197] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1781.487416] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6914cd64-e31a-4769-8097-301f0680a7ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.489196] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6017f74-5f68-4dfd-91c6-5139be1a560e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1781.494560] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1781.494560] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522abde2-acf9-8234-547e-e23a80caa5ae" [ 1781.494560] env[62405]: _type = "Task" [ 1781.494560] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.502008] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522abde2-acf9-8234-547e-e23a80caa5ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.510383] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947499, 'name': CreateVM_Task, 'duration_secs': 0.344794} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.511052] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Updated VIF entry in instance network info cache for port a6bb60c9-208a-4c73-96e1-13626d7d1dd8. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1781.511375] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Updating instance_info_cache with network_info: [{"id": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "address": "fa:16:3e:6f:12:15", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bb60c9-20", "ovs_interfaceid": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.512376] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1781.512969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 
tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.515018] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1781.515193] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1781.515870] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41b03353-d777-442d-ba5b-b11fe733b09c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.521323] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1781.521323] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525fb527-0a60-a5ca-5135-d658d785ec0e" [ 1781.521323] env[62405]: _type = "Task" [ 1781.521323] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.532621] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525fb527-0a60-a5ca-5135-d658d785ec0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.725575] env[62405]: DEBUG nova.network.neutron [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Updated VIF entry in instance network info cache for port 98b2004d-b7ad-4c97-bf77-8dbdb1077689. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1781.725959] env[62405]: DEBUG nova.network.neutron [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Updating instance_info_cache with network_info: [{"id": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "address": "fa:16:3e:57:1b:6a", "network": {"id": "beab648c-6cb4-4d39-9eb4-97500d8cb0ca", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-513861862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f89d800e697843108559c779e16fe3c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd72ef32-a57c-43b0-93df-e8a030987d44", "external-id": "nsx-vlan-transportzone-340", "segmentation_id": 340, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b2004d-b7", "ovs_interfaceid": "98b2004d-b7ad-4c97-bf77-8dbdb1077689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.808509] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1781.931914] env[62405]: DEBUG nova.network.neutron [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Updating instance_info_cache with network_info: [{"id": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "address": "fa:16:3e:53:06:d6", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd55cf55-bb", "ovs_interfaceid": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.959186] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.645s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.959750] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Start building networks asynchronously for instance. 
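The compute_resources lock messages here and just below come from the resource tracker serializing claims and usage updates per compute host behind a single semaphore, which is why some of the following entries report waits of 40+ seconds under load. A toy sketch of that serialization using oslo.concurrency's real synchronized decorator (the tracker class itself is a stand-in, not Nova's ResourceTracker):

```python
# Toy sketch of the "compute_resources" serialization visible in the
# lock messages: every claim and usage update runs under one semaphore.
# lockutils.synchronized is the real oslo.concurrency decorator; this
# class is illustrative only.
from oslo_concurrency import lockutils

class ToyResourceTracker:
    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance, flavor):
        # check the request against free inventory, then record the claim
        ...

    @lockutils.synchronized("compute_resources")
    def update_usage(self, instance):
        # re-tally this instance's usage under the same lock
        ...
```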
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1781.962829] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ca0aca02-4b99-4393-900c-b9cb0dad55c7] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1781.964548] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.967s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.964742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.966767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.914s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.966958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.968644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.978s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.970050] env[62405]: INFO nova.compute.claims [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1781.992868] env[62405]: INFO nova.scheduler.client.report [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted allocations for instance 9b495caf-4394-40c0-b68f-d02c7d759a6a [ 1781.996641] env[62405]: INFO nova.scheduler.client.report [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted allocations for instance dbb5dda5-5420-4d7b-8b32-152d51cb2fb9 [ 1782.011470] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 
tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522abde2-acf9-8234-547e-e23a80caa5ae, 'name': SearchDatastore_Task, 'duration_secs': 0.027087} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.011803] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.011988] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1782.012218] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.012447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.012746] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1782.012985] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-652fedd0-11d3-4167-b6a7-2b776f84d04e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.014875] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Releasing lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.015769] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-vif-plugged-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1782.015769] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 
req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquiring lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.015769] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.015769] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.015992] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] No waiting events found dispatching network-vif-plugged-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1782.015992] env[62405]: WARNING nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received unexpected event network-vif-plugged-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 for instance with vm_state building and task_state spawning. [ 1782.016192] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1782.016358] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing instance network info cache due to event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1782.016543] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.016681] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.016838] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1782.020889] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1782.020889] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521fd2b5-bbf6-25a0-a11d-3ff1d6559d81" [ 1782.020889] env[62405]: _type = "Task" [ 1782.020889] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.033449] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521fd2b5-bbf6-25a0-a11d-3ff1d6559d81, 'name': SearchDatastore_Task, 'duration_secs': 0.0097} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.036744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.036983] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1782.037208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.037674] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525fb527-0a60-a5ca-5135-d658d785ec0e, 'name': SearchDatastore_Task, 'duration_secs': 0.008906} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.038411] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d54a8af-8ea2-49dd-b278-4fee3c1ebba2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.043182] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1782.043182] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5229dad9-9d1b-426a-3c5f-47a5dad6f858" [ 1782.043182] env[62405]: _type = "Task" [ 1782.043182] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.050962] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5229dad9-9d1b-426a-3c5f-47a5dad6f858, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.229521] env[62405]: DEBUG oslo_concurrency.lockutils [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] Releasing lock "refresh_cache-f410acd2-f786-43bd-ad60-0a6248dedb1c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.229861] env[62405]: DEBUG nova.compute.manager [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Received event network-vif-deleted-c3206a84-3d77-4640-bfae-253a30dfa63c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1782.230105] env[62405]: DEBUG nova.compute.manager [req-d627d53e-c067-4eca-b71f-4b36f58fcfac req-c65107da-4516-469c-93ce-fa4d791bf4ea service nova] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Received event network-vif-deleted-7fbae16c-e943-4752-8a7e-92bdea130e1a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1782.434807] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.435165] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Instance network_info: |[{"id": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "address": "fa:16:3e:53:06:d6", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd55cf55-bb", "ovs_interfaceid": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1782.435592] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:06:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'dd55cf55-bb7f-4660-a37a-f2c0e4abc731', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1782.443048] env[62405]: DEBUG oslo.service.loopingcall [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.443218] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1782.443457] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd2f5fb0-01fa-4591-8335-3297e70e7696 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.462648] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1782.462648] env[62405]: value = "task-1947500" [ 1782.462648] env[62405]: _type = "Task" [ 1782.462648] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.469910] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947500, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.475372] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a1d35009-ea11-4e64-bbe4-604ed39d08f4] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1782.477872] env[62405]: DEBUG nova.compute.utils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1782.479242] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1782.479404] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1782.510129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f8b24120-387e-441c-b47e-4319efc188e9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "9b495caf-4394-40c0-b68f-d02c7d759a6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.144s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.511306] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f3478f72-e7cb-4106-8e6c-c14c8f96572f tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "dbb5dda5-5420-4d7b-8b32-152d51cb2fb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.065s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.530850] env[62405]: DEBUG nova.policy [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2d71c15bbd4115857a5dcc06037c25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd28fa9475f2f4a149bf00ccc63e70e3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1782.555312] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5229dad9-9d1b-426a-3c5f-47a5dad6f858, 'name': SearchDatastore_Task, 'duration_secs': 0.008868} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.555586] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.555842] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7256b956-e41a-40ec-a687-a129a8bafcb6/7256b956-e41a-40ec-a687-a129a8bafcb6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1782.556141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.556364] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1782.556842] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5965da5-dd4f-497f-b0bb-464f545e884b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.558483] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-067e372e-19a3-4337-84be-0a05c09f01a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.565212] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1782.565212] env[62405]: value = "task-1947501" [ 1782.565212] env[62405]: _type = "Task" [ 1782.565212] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.570029] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1782.570212] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1782.574164] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db07c887-9858-4651-9440-5b3242acce86 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.578993] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947501, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.584502] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1782.584502] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ae196-1115-51b2-7660-ff863131a70c" [ 1782.584502] env[62405]: _type = "Task" [ 1782.584502] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.591830] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ae196-1115-51b2-7660-ff863131a70c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.799101] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Successfully created port: 483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1782.879749] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updated VIF entry in instance network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.880122] env[62405]: DEBUG nova.network.neutron [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.901435] env[62405]: DEBUG nova.compute.manager [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Received event network-changed-dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1782.901628] env[62405]: DEBUG nova.compute.manager [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Refreshing instance network info cache due to event network-changed-dd55cf55-bb7f-4660-a37a-f2c0e4abc731. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1782.901859] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] Acquiring lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.902010] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] Acquired lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.902180] env[62405]: DEBUG nova.network.neutron [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Refreshing network info cache for port dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1782.974679] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947500, 'name': CreateVM_Task, 'duration_secs': 0.499394} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.975139] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1782.976691] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.977183] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.977770] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1782.979084] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e774cac-41d7-4130-b565-79b9e6762640 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.986109] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8995f9cb-8454-4a98-9090-290f87f8af18] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1782.988740] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1783.003654] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1783.003654] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c7bec2-348b-f371-a786-efa45bcdc639" [ 1783.003654] env[62405]: _type = "Task" [ 1783.003654] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.018029] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c7bec2-348b-f371-a786-efa45bcdc639, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.075489] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947501, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.094152] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ae196-1115-51b2-7660-ff863131a70c, 'name': SearchDatastore_Task, 'duration_secs': 0.03211} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.094878] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76af33a7-d161-46d5-8d9e-438027c745ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.102224] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1783.102224] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526583a4-a316-9d5c-52b2-50cfce158309" [ 1783.102224] env[62405]: _type = "Task" [ 1783.102224] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.110465] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526583a4-a316-9d5c-52b2-50cfce158309, 'name': SearchDatastore_Task, 'duration_secs': 0.008312} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.110704] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.110963] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d/742c8d94-48d1-4408-91dc-98f25661aa8d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1783.111566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.111758] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.111968] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a85f495e-085f-4f3e-8151-65fd5c212d95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.116152] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34bde414-2c1e-4643-be71-966b6d981f69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.122581] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1783.122581] env[62405]: value = "task-1947502" [ 1783.122581] env[62405]: _type = "Task" [ 1783.122581] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.127008] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.127218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1783.130231] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-217dbc22-73a5-4c0e-b75a-287edbc6cc5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.135299] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947502, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.138193] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1783.138193] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522acc84-0dd8-b592-5fb3-ac0115dacec4" [ 1783.138193] env[62405]: _type = "Task" [ 1783.138193] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.146255] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522acc84-0dd8-b592-5fb3-ac0115dacec4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.383427] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.383704] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Received event network-vif-plugged-dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1783.383952] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Acquiring lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.384230] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.384424] env[62405]: DEBUG oslo_concurrency.lockutils [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.384594] env[62405]: DEBUG nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] No waiting events found dispatching network-vif-plugged-dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1783.384764] env[62405]: WARNING nova.compute.manager [req-662d280d-e315-40e5-93d3-9ac92ddfe923 req-cdfa097e-f162-405f-92d6-da595a35af31 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Received unexpected event network-vif-plugged-dd55cf55-bb7f-4660-a37a-f2c0e4abc731 for instance with vm_state building and task_state spawning. 
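For readers tracing these entries: the network_info payloads logged by update_instance_cache_with_nw_info above (for example the one for port dd55cf55-bb7f-4660-a37a-f2c0e4abc731) are plain JSON lists of VIF dictionaries. The snippet below is a minimal, hypothetical helper — not part of Nova — for pulling the port ID, MAC address, fixed IPs and MTU out of such a blob when reading a log like this one; the field names are taken directly from the entries above, but the helper itself and its example input are illustrative assumptions only.

```python
import json

# Hypothetical helper (not part of Nova): summarise one network_info blob,
# i.e. the JSON list logged by update_instance_cache_with_nw_info above.
def summarize_network_info(blob: str):
    """Return (port_id, mac, fixed_ips, mtu) for each VIF in the blob."""
    vifs = json.loads(blob)
    summary = []
    for vif in vifs:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
        ]
        summary.append((
            vif["id"],                          # Neutron port UUID
            vif["address"],                     # MAC, e.g. fa:16:3e:53:06:d6
            fixed_ips,                          # e.g. ['192.168.128.3']
            vif["network"]["meta"].get("mtu"),  # e.g. 8950
        ))
    return summary

# Example input shaped like the entries above (heavily trimmed):
example = json.dumps([{
    "id": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731",
    "address": "fa:16:3e:53:06:d6",
    "network": {
        "subnets": [{"ips": [{"address": "192.168.128.3"}]}],
        "meta": {"mtu": 8950},
    },
}])
print(summarize_network_info(example))
```

Run against the full blobs in the entries above, this prints one tuple per VIF, which is usually enough to correlate the network-vif-plugged / network-changed events in these log lines with the port they refer to.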
[ 1783.415162] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5b03fc-ac0f-480e-98f5-a82863143dad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.427120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b5757f-6f46-4995-99b4-1f84db3c7e74 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.459854] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6b32d9-d23d-4f37-8d04-3d38e879f8c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.468255] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4284881-b10a-4b35-bc43-7541558bc94c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.482498] env[62405]: DEBUG nova.compute.provider_tree [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.496887] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 02abae6c-8962-49eb-8fa9-36b13a20eff1] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1783.515658] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c7bec2-348b-f371-a786-efa45bcdc639, 'name': SearchDatastore_Task, 'duration_secs': 0.067224} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.515961] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.516662] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1783.516935] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.575949] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523328} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.576561] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7256b956-e41a-40ec-a687-a129a8bafcb6/7256b956-e41a-40ec-a687-a129a8bafcb6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1783.578605] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1783.578605] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93374706-69a9-4863-a44c-000859e75826 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.583861] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1783.583861] env[62405]: value = "task-1947503" [ 1783.583861] env[62405]: _type = "Task" [ 1783.583861] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.593132] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947503, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.633205] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947502, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488492} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.636558] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d/742c8d94-48d1-4408-91dc-98f25661aa8d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1783.636795] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1783.637076] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a394065-63b2-4363-9c5d-4936a272aef7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.651961] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522acc84-0dd8-b592-5fb3-ac0115dacec4, 'name': SearchDatastore_Task, 'duration_secs': 0.008221} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.651961] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1783.651961] env[62405]: value = "task-1947504" [ 1783.651961] env[62405]: _type = "Task" [ 1783.651961] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.651961] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-122b481d-f2e7-4457-826e-caeba9d9c319 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.663095] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1783.663095] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b99555-77c3-e140-0e9a-68e22240fa35" [ 1783.663095] env[62405]: _type = "Task" [ 1783.663095] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.663372] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.671609] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b99555-77c3-e140-0e9a-68e22240fa35, 'name': SearchDatastore_Task, 'duration_secs': 0.009689} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.671861] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.672144] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f410acd2-f786-43bd-ad60-0a6248dedb1c/f410acd2-f786-43bd-ad60-0a6248dedb1c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1783.672432] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.672634] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.672852] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c16ee51-d3d0-49ac-973b-6476ce265a98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.674847] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01330cbd-fb3e-48ce-ae1b-8e51cc1aed87 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.681911] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1783.681911] env[62405]: value = "task-1947505" [ 1783.681911] env[62405]: _type = "Task" [ 1783.681911] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.682994] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.683194] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1783.686501] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6102f84a-c88a-4bba-9cc9-1ce1987cb651 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.694166] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947505, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.695060] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1783.695060] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c311-ec49-e424-40cc-5b9c36384421" [ 1783.695060] env[62405]: _type = "Task" [ 1783.695060] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.702897] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c311-ec49-e424-40cc-5b9c36384421, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.798128] env[62405]: DEBUG nova.network.neutron [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Updated VIF entry in instance network info cache for port dd55cf55-bb7f-4660-a37a-f2c0e4abc731. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.798570] env[62405]: DEBUG nova.network.neutron [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Updating instance_info_cache with network_info: [{"id": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "address": "fa:16:3e:53:06:d6", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd55cf55-bb", "ovs_interfaceid": "dd55cf55-bb7f-4660-a37a-f2c0e4abc731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.985795] env[62405]: DEBUG nova.scheduler.client.report [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1784.004576] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1784.006980] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 7db1b086-942e-4890-8750-0d717e522786] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1784.039533] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1784.039792] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1784.039951] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1784.040150] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1784.040298] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1784.040767] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1784.040767] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1784.040864] env[62405]: DEBUG nova.virt.hardware [None 
req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1784.040960] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1784.041113] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1784.041710] env[62405]: DEBUG nova.virt.hardware [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1784.042571] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8b47a0-e9af-4649-bce4-51d985f67c2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.053142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cd0fd0-23d2-4b72-b570-9fc53c6ad5ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.094195] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073172} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.094513] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1784.095335] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dee9a9-c276-4c87-b306-071d2c442b65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.120372] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 7256b956-e41a-40ec-a687-a129a8bafcb6/7256b956-e41a-40ec-a687-a129a8bafcb6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1784.121327] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d5b65bb-67a0-4cb6-b306-8a7956b853e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.141427] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1784.141427] env[62405]: value = "task-1947506" [ 1784.141427] env[62405]: _type = "Task" [ 1784.141427] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.152088] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947506, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.156363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "00158b10-4292-48f3-85a0-991af1dbc5f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.156631] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.168349] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075687} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.168667] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1784.169622] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ea1dcd-a2ed-4b7d-92d0-2b3920a258b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.195374] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d/742c8d94-48d1-4408-91dc-98f25661aa8d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1784.200431] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d557c45b-696c-4e83-8454-9669dcd271bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.220236] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.220596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 
tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.226762] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947505, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543823} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.231181] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f410acd2-f786-43bd-ad60-0a6248dedb1c/f410acd2-f786-43bd-ad60-0a6248dedb1c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1784.231351] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1784.231663] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1784.231663] env[62405]: value = "task-1947507" [ 1784.231663] env[62405]: _type = "Task" [ 1784.231663] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.231869] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c311-ec49-e424-40cc-5b9c36384421, 'name': SearchDatastore_Task, 'duration_secs': 0.021701} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.232083] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe535e79-cd23-4fee-9e31-36cd92ae868a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.237528] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b573c84c-d696-42d4-baa8-26e3b5770c05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.243476] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1784.243476] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a80f75-e488-4ae8-4f7a-1d5d25aa1498" [ 1784.243476] env[62405]: _type = "Task" [ 1784.243476] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.246220] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947507, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.250456] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1784.250456] env[62405]: value = "task-1947508" [ 1784.250456] env[62405]: _type = "Task" [ 1784.250456] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.257454] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a80f75-e488-4ae8-4f7a-1d5d25aa1498, 'name': SearchDatastore_Task, 'duration_secs': 0.009772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.258127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.258361] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9aa9e0de-7314-4d8b-8e9f-b6d330cae914/9aa9e0de-7314-4d8b-8e9f-b6d330cae914.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1784.258639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33f2eb92-88b1-4bbb-8c54-9c3040368683 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.265177] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.268293] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1784.268293] env[62405]: value = "task-1947509" [ 1784.268293] env[62405]: _type = "Task" [ 1784.268293] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.276563] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947509, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.301884] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6a1ad90-cf43-4afe-86dd-c9d5df211f3a req-35a36699-83eb-41aa-960d-475b31a2bc92 service nova] Releasing lock "refresh_cache-9aa9e0de-7314-4d8b-8e9f-b6d330cae914" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.395384] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Successfully updated port: 483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1784.492008] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.492669] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1784.495885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.192s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.497515] env[62405]: INFO nova.compute.claims [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1784.510142] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 801e7086-5742-4a04-962c-7546284aa12d] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1784.653607] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947506, 'name': ReconfigVM_Task, 'duration_secs': 0.335367} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.653818] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 7256b956-e41a-40ec-a687-a129a8bafcb6/7256b956-e41a-40ec-a687-a129a8bafcb6.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1784.655018] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f47daf73-42c8-4452-8be0-80ae8dc867f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.661714] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1784.661714] env[62405]: value = "task-1947510" [ 1784.661714] env[62405]: _type = "Task" [ 1784.661714] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.672231] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947510, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.745099] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947507, 'name': ReconfigVM_Task, 'duration_secs': 0.340451} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.745099] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d/742c8d94-48d1-4408-91dc-98f25661aa8d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1784.745361] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95755a66-bc5b-487f-af87-b3c913d06a7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.755816] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1784.755816] env[62405]: value = "task-1947511" [ 1784.755816] env[62405]: _type = "Task" [ 1784.755816] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.762176] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082697} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.762833] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1784.763646] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28ae391-4714-4e9c-a51a-152b6914f31c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.770053] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947511, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.792435] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] f410acd2-f786-43bd-ad60-0a6248dedb1c/f410acd2-f786-43bd-ad60-0a6248dedb1c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1784.793339] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee098616-d69c-47ce-ab25-79815391e62f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.812690] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947509, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.819595] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1784.819595] env[62405]: value = "task-1947512" [ 1784.819595] env[62405]: _type = "Task" [ 1784.819595] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.829689] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947512, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.897783] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.897948] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.898135] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.951931] env[62405]: DEBUG nova.compute.manager [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Received event network-vif-plugged-483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1784.952167] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Acquiring lock "48554024-9b6f-44be-b21e-615b25cd790c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.952403] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Lock "48554024-9b6f-44be-b21e-615b25cd790c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.952581] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Lock "48554024-9b6f-44be-b21e-615b25cd790c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.952752] env[62405]: DEBUG nova.compute.manager [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] No waiting events found dispatching network-vif-plugged-483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1784.952918] env[62405]: WARNING nova.compute.manager [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Received unexpected event network-vif-plugged-483a3097-a1d8-4e46-8db6-09591eb7e3d4 for instance with vm_state building and task_state spawning. 
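The Acquiring / acquired / "released" records around the per-instance "48554024-9b6f-44be-b21e-615b25cd790c-events" lock come from oslo.concurrency's lockutils wrapper, which reports how long each caller waited for and then held a named lock. A minimal sketch of that pattern, assuming the public lockutils.synchronized decorator; the lock name mirrors the log, but the event queue and function below are illustrative placeholders, not Nova code:

```python
# Sketch only: the named-lock pattern behind the 'Acquiring lock ... by ...',
# 'Lock ... acquired ... :: waited 0.000s' and 'Lock ... "released" ... :: held'
# records above. The event queue is an illustrative placeholder, not Nova code.
from oslo_concurrency import lockutils

INSTANCE_UUID = "48554024-9b6f-44be-b21e-615b25cd790c"
_pending_events = ["network-vif-plugged-483a3097-a1d8-4e46-8db6-09591eb7e3d4"]

def pop_instance_event():
    # Decorating a nested function yields a "pop_instance_event.<locals>._pop_event"
    # holder name like the one in the records; lockutils logs the waited/held times.
    @lockutils.synchronized(f"{INSTANCE_UUID}-events")
    def _pop_event():
        return _pending_events.pop(0) if _pending_events else None
    return _pop_event()

print(pop_instance_event())
```

In the records above the lock is held only for the queue pop itself (held 0.000s), and the "No waiting events found dispatching network-vif-plugged-..." message is emitted after it has been released.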
[ 1784.953184] env[62405]: DEBUG nova.compute.manager [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Received event network-changed-483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1784.953363] env[62405]: DEBUG nova.compute.manager [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Refreshing instance network info cache due to event network-changed-483a3097-a1d8-4e46-8db6-09591eb7e3d4. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1784.953589] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Acquiring lock "refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.004991] env[62405]: DEBUG nova.compute.utils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1785.006586] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1785.006959] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1785.013424] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9b71f962-2b92-4f7b-bb8d-b50da5130018] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1785.062866] env[62405]: DEBUG nova.policy [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d34125804204f3b92e06e7b8738d73a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0d6dfea772e432289163b14e9e341c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1785.172607] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947510, 'name': Rename_Task, 'duration_secs': 0.21188} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.172912] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1785.173180] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70709384-efe0-470d-bc6f-f57ef98ffb7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.183376] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1785.183376] env[62405]: value = "task-1947513" [ 1785.183376] env[62405]: _type = "Task" [ 1785.183376] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.195152] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.266074] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947511, 'name': Rename_Task, 'duration_secs': 0.157028} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.266444] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1785.266733] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df408258-1b20-442d-be1d-ae94096a747f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.277097] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1785.277097] env[62405]: value = "task-1947514" [ 1785.277097] env[62405]: _type = "Task" [ 1785.277097] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.283566] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540539} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.284148] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9aa9e0de-7314-4d8b-8e9f-b6d330cae914/9aa9e0de-7314-4d8b-8e9f-b6d330cae914.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1785.284363] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1785.284624] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-360f2d31-5b05-41cf-ae27-daf160e35cf1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.289125] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.293946] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1785.293946] env[62405]: value = "task-1947515" [ 1785.293946] env[62405]: _type = "Task" [ 1785.293946] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.303934] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947515, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.330777] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947512, 'name': ReconfigVM_Task, 'duration_secs': 0.338756} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.331086] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Reconfigured VM instance instance-00000041 to attach disk [datastore1] f410acd2-f786-43bd-ad60-0a6248dedb1c/f410acd2-f786-43bd-ad60-0a6248dedb1c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1785.332633] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7c847d9-1b2a-42fc-bc72-89e42eaeea8a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.338663] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1785.338663] env[62405]: value = "task-1947516" [ 1785.338663] env[62405]: _type = "Task" [ 1785.338663] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.343832] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully created port: 63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.349940] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947516, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.448911] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1785.508039] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1785.517419] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0491dc4b-cf35-4035-aca9-baf43b86af7e] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1785.602592] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully created port: aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.608113] env[62405]: DEBUG nova.network.neutron [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Updating instance_info_cache with network_info: [{"id": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "address": "fa:16:3e:d9:83:3c", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap483a3097-a1", "ovs_interfaceid": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.695355] env[62405]: DEBUG oslo_vmware.api [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947513, 'name': PowerOnVM_Task, 'duration_secs': 0.50584} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.695653] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.695856] env[62405]: INFO nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Took 7.90 seconds to spawn the instance on the hypervisor. 
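The recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%", and "completed successfully" records are oslo.vmware polling vCenter tasks (wait_for_task/_poll_task in oslo_vmware/api.py). A minimal sketch of how a caller typically drives such a task, assuming oslo.vmware's VMwareAPISession, invoke_api and wait_for_task; the host, credentials, poll settings and VM reference are placeholders, and constructor arguments can differ by release:

```python
# Sketch only: the session/task pattern behind the wait_for_task and
# "Task: {...} progress is N% / completed successfully" records above.
# Host, credentials, retry/poll settings and vm_ref are placeholders;
# they are not taken from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.org",      # placeholder vCenter endpoint
    "svc-nova", "secret",       # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5,     # drives the frequent "progress is N%" polls
)

vm_ref = ...  # placeholder: a VirtualMachine managed-object reference obtained elsewhere

# Kick off a long-running vSphere task (here a power-on, as with task-1947513) ...
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

# ... then block until vCenter reports completion; oslo.vmware polls the task
# and logs the intermediate progress and the final success or failure.
session.wait_for_task(task)
```

In the records above the same polling loop runs concurrently for several tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each identified by its task-194750x id.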
[ 1785.696161] env[62405]: DEBUG nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.697062] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d477e0-372e-4db5-ae0b-97bf3032aa6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.790371] env[62405]: DEBUG oslo_vmware.api [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947514, 'name': PowerOnVM_Task, 'duration_secs': 0.471538} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.790660] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1785.790863] env[62405]: INFO nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Took 13.05 seconds to spawn the instance on the hypervisor. [ 1785.791060] env[62405]: DEBUG nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.791846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139859b5-23e7-4880-aad7-86e7e6db42d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.803507] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084227} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.805380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1785.809687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94339fba-9ada-48bc-929d-4eb3db3235ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.839074] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 9aa9e0de-7314-4d8b-8e9f-b6d330cae914/9aa9e0de-7314-4d8b-8e9f-b6d330cae914.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1785.843922] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7c24531-2f1c-4891-92f3-c550dad7d85d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.868390] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947516, 'name': Rename_Task, 'duration_secs': 0.1394} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.869943] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1785.870240] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1785.870240] env[62405]: value = "task-1947517" [ 1785.870240] env[62405]: _type = "Task" [ 1785.870240] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.871294] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully created port: e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.875296] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4448656d-f30f-4fe6-a00a-bb519a0959ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.888025] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.891757] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1785.891757] env[62405]: value = "task-1947518" [ 1785.891757] env[62405]: _type = "Task" [ 1785.891757] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.900844] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947518, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.980102] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208b6c4f-cdd9-4729-b076-3b434cda2753 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.988368] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce29b81-a5e8-493d-b9d5-01f560f03109 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.022881] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7904201-192f-42cd-95a4-7c87a0ab14bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.026059] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8624629d-642a-4adf-984e-3925beeb4fef] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1786.032826] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1d2d13-b117-4a90-8f5e-6d5d119ebf06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.046590] env[62405]: DEBUG nova.compute.provider_tree [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1786.108723] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.109109] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Instance network_info: |[{"id": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "address": "fa:16:3e:d9:83:3c", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap483a3097-a1", "ovs_interfaceid": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1786.109425] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Acquired lock "refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.109613] env[62405]: DEBUG nova.network.neutron [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Refreshing network info cache for port 483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1786.110819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:83:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '483a3097-a1d8-4e46-8db6-09591eb7e3d4', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.119443] env[62405]: DEBUG oslo.service.loopingcall [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.120412] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1786.120656] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5520dc31-323f-4461-8982-a927fc58e3e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.142291] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.142291] env[62405]: value = "task-1947519" [ 1786.142291] env[62405]: _type = "Task" [ 1786.142291] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.151015] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947519, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.217638] env[62405]: INFO nova.compute.manager [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Took 60.85 seconds to build instance. [ 1786.322677] env[62405]: INFO nova.compute.manager [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Took 72.88 seconds to build instance. [ 1786.387677] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.401526] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947518, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.527534] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1786.529996] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 262424b0-dc7d-4b6c-9539-2d6cd23a93da] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1786.561078] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1786.561327] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.561506] env[62405]: DEBUG nova.virt.hardware [None 
req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1786.561729] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.561913] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1786.562205] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1786.563672] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1786.563672] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1786.563672] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1786.563672] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1786.563672] env[62405]: DEBUG nova.virt.hardware [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1786.565130] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbc2cb7-53f3-4d79-ae79-564107df233d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.575705] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7d836d-6030-471b-a14d-c11481870501 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.585269] env[62405]: ERROR nova.scheduler.client.report 
[None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [req-b5fbd250-d82d-4049-834e-e18078110b56] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b5fbd250-d82d-4049-834e-e18078110b56"}]} [ 1786.611370] env[62405]: DEBUG nova.scheduler.client.report [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1786.629385] env[62405]: DEBUG nova.scheduler.client.report [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1786.629618] env[62405]: DEBUG nova.compute.provider_tree [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1786.641295] env[62405]: DEBUG nova.scheduler.client.report [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1786.652589] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947519, 'name': CreateVM_Task, 'duration_secs': 0.43881} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.652852] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1786.657024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.657024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.657024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1786.657024] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bdcb063-ef8b-4f4e-bdaa-d697c4ada2b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.658852] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1786.658852] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522d97b0-731f-ccdd-61ad-4564e3ddc25c" [ 1786.658852] env[62405]: _type = "Task" [ 1786.658852] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.662747] env[62405]: DEBUG nova.scheduler.client.report [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1786.670028] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522d97b0-731f-ccdd-61ad-4564e3ddc25c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.722052] env[62405]: DEBUG oslo_concurrency.lockutils [None req-49d3277c-7ba7-49a0-985d-da773c85dd9a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.970s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.825632] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9f977f98-3656-47ce-9d34-75bbe94fb5b0 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.966s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.887269] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947517, 'name': ReconfigVM_Task, 'duration_secs': 0.722966} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.891728] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 9aa9e0de-7314-4d8b-8e9f-b6d330cae914/9aa9e0de-7314-4d8b-8e9f-b6d330cae914.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1786.893198] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1022f42-38a2-494e-b91b-8403bb4761a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.902133] env[62405]: DEBUG oslo_vmware.api [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947518, 'name': PowerOnVM_Task, 'duration_secs': 0.87807} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.905521] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1786.905585] env[62405]: INFO nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Took 11.66 seconds to spawn the instance on the hypervisor. 
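[Annotation] The Rename_Task, PowerOnVM_Task and ReconfigVM_Task entries above all follow the same client-side pattern: a vCenter task is submitted, then polled (the _poll_task / wait_for_task trailers) until it reports success or error. The sketch below is illustrative only, not the oslo.vmware implementation; get_task_info() is a hypothetical helper standing in for the real property-collector call.

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Poll the submitted vCenter task until it leaves the running state.
        while True:
            info = session.get_task_info(task_ref)   # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error_msg)
            time.sleep(poll_interval)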
[ 1786.905753] env[62405]: DEBUG nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1786.906105] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1786.906105] env[62405]: value = "task-1947520" [ 1786.906105] env[62405]: _type = "Task" [ 1786.906105] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.906960] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7a0492-07e5-4514-97ad-1c2b07d6473f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.924056] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947520, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.039462] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6199de01-baca-4461-9572-111eda11adac] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1787.118776] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e4e004-b35e-40dc-b80d-997a9bc16256 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.130501] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4bb27b-55ef-4343-b478-f85e27e7b641 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.168080] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a8c09d-7794-460a-9703-c87ec2f56a99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.176799] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522d97b0-731f-ccdd-61ad-4564e3ddc25c, 'name': SearchDatastore_Task, 'duration_secs': 0.015066} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.178784] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.179046] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.179474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.179474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.179623] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.179878] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8e64842-4da4-4b6a-a8c7-06f85fa00ec2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.183439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f5a102-1f76-4163-a3dd-aa838afae958 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.200772] env[62405]: DEBUG nova.compute.provider_tree [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1787.203828] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 
tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.204016] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.204747] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be75c18b-6d45-45c2-9757-a1ef5db453c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.210858] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1787.210858] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245deba-4909-1895-c03a-22e2e1ee1045" [ 1787.210858] env[62405]: _type = "Task" [ 1787.210858] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.220280] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245deba-4909-1895-c03a-22e2e1ee1045, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.228810] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1787.331192] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1787.355657] env[62405]: DEBUG nova.network.neutron [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Updated VIF entry in instance network info cache for port 483a3097-a1d8-4e46-8db6-09591eb7e3d4. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1787.356082] env[62405]: DEBUG nova.network.neutron [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Updating instance_info_cache with network_info: [{"id": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "address": "fa:16:3e:d9:83:3c", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap483a3097-a1", "ovs_interfaceid": "483a3097-a1d8-4e46-8db6-09591eb7e3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.395800] env[62405]: DEBUG nova.compute.manager [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-plugged-63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1787.396035] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.398300] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.398499] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.398700] env[62405]: DEBUG nova.compute.manager [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] No waiting events found dispatching network-vif-plugged-63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1787.398945] env[62405]: WARNING nova.compute.manager [req-1f08ac4c-cb02-4a08-b2d1-adea25163bed req-1f007833-cfb9-4d8a-bf92-8fe9adcbe7a5 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received unexpected event network-vif-plugged-63dd773b-7125-4f8e-a520-2339dbdacbb2 for instance with vm_state building and task_state spawning. [ 1787.419139] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947520, 'name': Rename_Task, 'duration_secs': 0.271079} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.419276] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1787.419516] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5876529f-fbd9-489c-8818-f571caef8abe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.430399] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1787.430399] env[62405]: value = "task-1947521" [ 1787.430399] env[62405]: _type = "Task" [ 1787.430399] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.436072] env[62405]: INFO nova.compute.manager [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Took 63.14 seconds to build instance. [ 1787.440656] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully updated port: 63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1787.448688] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947521, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.542756] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3c0b964f-c900-4704-ae12-7eba7952f678] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1787.638820] env[62405]: DEBUG nova.compute.manager [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1787.639750] env[62405]: DEBUG nova.compute.manager [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing instance network info cache due to event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1787.640113] env[62405]: DEBUG oslo_concurrency.lockutils [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.640260] env[62405]: DEBUG oslo_concurrency.lockutils [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.640441] env[62405]: DEBUG nova.network.neutron [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.723318] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245deba-4909-1895-c03a-22e2e1ee1045, 'name': SearchDatastore_Task, 'duration_secs': 0.031314} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.724373] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a90322ed-9b5b-4dcb-b125-8ab082940bea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.731161] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1787.731161] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52115a26-956f-0590-ed91-aa69e5fa9b5c" [ 1787.731161] env[62405]: _type = "Task" [ 1787.731161] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.746009] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52115a26-956f-0590-ed91-aa69e5fa9b5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.747143] env[62405]: DEBUG nova.scheduler.client.report [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1787.748023] env[62405]: DEBUG nova.compute.provider_tree [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 111 to 112 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1787.748023] env[62405]: DEBUG nova.compute.provider_tree [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1787.758400] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.849650] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.858374] env[62405]: DEBUG oslo_concurrency.lockutils [req-d0691316-f5bb-4c54-aae6-87111577adad req-df5cdbed-c1b4-46d7-a3eb-1928cc45e43d service nova] Releasing lock 
"refresh_cache-48554024-9b6f-44be-b21e-615b25cd790c" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.943358] env[62405]: DEBUG oslo_concurrency.lockutils [None req-85da9e86-3937-4c13-9a11-2eaba542a10b tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.229s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.945793] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947521, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.053977] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2257c786-54f9-441a-832c-cf3178bfcc78] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 1788.252258] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52115a26-956f-0590-ed91-aa69e5fa9b5c, 'name': SearchDatastore_Task, 'duration_secs': 0.024386} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.252258] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.252258] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 48554024-9b6f-44be-b21e-615b25cd790c/48554024-9b6f-44be-b21e-615b25cd790c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1788.252258] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bf45f28-e0f1-48db-9ca1-544417f49d16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.255660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.760s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.256257] env[62405]: DEBUG nova.compute.manager [None 
req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1788.264996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 42.050s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.275861] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1788.275861] env[62405]: value = "task-1947522" [ 1788.275861] env[62405]: _type = "Task" [ 1788.275861] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.289074] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.443568] env[62405]: DEBUG oslo_vmware.api [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947521, 'name': PowerOnVM_Task, 'duration_secs': 0.869219} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.443848] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1788.444131] env[62405]: INFO nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Took 8.10 seconds to spawn the instance on the hypervisor. 
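[Annotation] The placement exchange logged above (the 409 placement.concurrent_update error, the inventory refresh, then the successful update that moves the provider generation from 111 to 112) is an optimistic-concurrency retry: each write carries the provider generation, and a conflict means another writer updated the provider first. A minimal sketch of that retry shape is below, under assumed helper names; get_provider() and put_inventory() are hypothetical and not the Nova report-client API.

    def set_inventory_with_retry(client, provider_uuid, inventory, max_attempts=3):
        # Re-read the provider on every attempt so the PUT carries the current
        # generation; a 409 response means a concurrent update won, so retry.
        for _ in range(max_attempts):
            provider = client.get_provider(provider_uuid)                # hypothetical
            resp = client.put_inventory(provider_uuid, inventory,
                                        generation=provider.generation)  # hypothetical
            if resp.status_code != 409:
                return resp
        raise RuntimeError('inventory update still conflicting after retries')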
[ 1788.444349] env[62405]: DEBUG nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1788.446122] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96f71b2-d20f-4bca-b59c-2132169d35c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.451713] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1788.557540] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.558073] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 1788.608119] env[62405]: DEBUG nova.network.neutron [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updated VIF entry in instance network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.608529] env[62405]: DEBUG nova.network.neutron [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.763471] env[62405]: DEBUG nova.compute.utils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1788.764994] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1788.765196] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1788.769559] env[62405]: INFO nova.compute.claims [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1788.773133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.773373] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.773576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1788.773761] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.773928] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.779447] env[62405]: INFO nova.compute.manager [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Terminating instance [ 1788.794382] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 
tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947522, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.817930] env[62405]: DEBUG nova.policy [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1788.978715] env[62405]: INFO nova.compute.manager [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Took 58.58 seconds to build instance. [ 1788.990459] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.060035] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.111483] env[62405]: DEBUG oslo_concurrency.lockutils [req-4014cee0-a217-4282-a5e5-34da97bd1865 req-9b4bfc57-2845-4bff-97fe-42c5abf02c02 service nova] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.178633] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Successfully created port: 09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1789.268088] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1789.280526] env[62405]: INFO nova.compute.resource_tracker [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating resource usage from migration b4c9b590-842e-4bbd-bf8c-7c1854c857a2 [ 1789.289117] env[62405]: DEBUG nova.compute.manager [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1789.289117] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.289315] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8082c4-ce21-45b8-b66c-ff7d0710ce8a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.301934] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6291} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.305615] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 48554024-9b6f-44be-b21e-615b25cd790c/48554024-9b6f-44be-b21e-615b25cd790c.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1789.306015] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1789.306466] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.310431] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6aeaf32-b413-41da-80b5-03a8434c8160 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.313647] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-99e292a6-c043-42ec-b45e-02ff31102925 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.322201] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1789.322201] env[62405]: value = "task-1947523" [ 1789.322201] env[62405]: _type = "Task" [ 1789.322201] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.323541] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1789.323541] env[62405]: value = "task-1947524" [ 1789.323541] env[62405]: _type = "Task" [ 1789.323541] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.340580] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.344096] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947524, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.432097] env[62405]: DEBUG nova.compute.manager [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-changed-63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1789.432387] env[62405]: DEBUG nova.compute.manager [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing instance network info cache due to event network-changed-63dd773b-7125-4f8e-a520-2339dbdacbb2. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1789.432462] env[62405]: DEBUG oslo_concurrency.lockutils [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] Acquiring lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.432611] env[62405]: DEBUG oslo_concurrency.lockutils [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] Acquired lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.432748] env[62405]: DEBUG nova.network.neutron [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing network info cache for port 63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1789.480618] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77eb464a-13f7-4492-824d-5018abb080f4 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.030s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.553312] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully updated port: aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1789.739555] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af6a287-4e78-429c-9898-dce21e98da56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.748303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f7ea11-9b9d-41ed-b8c6-055f97a078b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.780374] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897f30f1-bb9b-4848-ad77-95a23bf9b0b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.788703] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64adaac2-020c-446e-9b90-a0eb613afb32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.802011] env[62405]: DEBUG nova.compute.provider_tree [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.837305] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 
tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080486} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.840206] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.840504] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.841198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984a8c3c-d926-4c0d-95c3-122e93b74885 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.863493] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 48554024-9b6f-44be-b21e-615b25cd790c/48554024-9b6f-44be-b21e-615b25cd790c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.863824] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1ad272e-b62c-46fb-9fc0-4041a88a5e04 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.884074] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1789.884074] env[62405]: value = "task-1947525" [ 1789.884074] env[62405]: _type = "Task" [ 1789.884074] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.892322] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947525, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.979033] env[62405]: DEBUG nova.network.neutron [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1789.985618] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1790.072226] env[62405]: DEBUG nova.network.neutron [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.285610] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1790.307905] env[62405]: DEBUG nova.scheduler.client.report [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1790.319956] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1790.320289] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.320493] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 
{{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1790.320729] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.320849] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1790.321010] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1790.321320] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1790.321488] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1790.321656] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1790.321821] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1790.322007] env[62405]: DEBUG nova.virt.hardware [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1790.323337] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274a12ec-f024-423f-a72f-8a17289bdfd5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.338525] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947523, 'name': PowerOffVM_Task, 'duration_secs': 0.711719} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.339696] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebad484a-4f31-4026-a97f-6ff9c26ba9a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.344807] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1790.345073] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1790.345376] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3309fa5a-93fe-4943-8bb0-d1fb919f2dea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.392385] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.448914] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1790.449387] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1790.449387] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Deleting the datastore file [datastore1] f410acd2-f786-43bd-ad60-0a6248dedb1c {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.449576] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f99984fc-2931-46c4-ada0-19ae0c5cc198 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.456387] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for the task: (returnval){ [ 1790.456387] env[62405]: value = "task-1947527" [ 1790.456387] env[62405]: _type = "Task" [ 1790.456387] 
env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.464558] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.509672] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.574526] env[62405]: DEBUG oslo_concurrency.lockutils [req-f06ce5f4-afd5-4cab-a2d8-3437703a8ba7 req-4d02d23e-fa62-4e1c-b361-c3442874fb2c service nova] Releasing lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.813582] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.549s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.813894] env[62405]: INFO nova.compute.manager [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Migrating [ 1790.820434] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 43.694s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.898221] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947525, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.932332] env[62405]: DEBUG nova.compute.manager [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-plugged-09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1790.932572] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.932798] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.932978] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.933281] env[62405]: DEBUG nova.compute.manager [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] No waiting events found dispatching network-vif-plugged-09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1790.933417] env[62405]: WARNING nova.compute.manager [req-2d1c7a3e-e627-4e05-a067-b5491266c3fd req-32a8ddb8-855d-41e7-b6fd-315be9c00bf2 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received unexpected event network-vif-plugged-09308517-a17c-48d3-b01f-fed73b19adfd for instance with vm_state building and task_state spawning. [ 1790.967638] env[62405]: DEBUG oslo_vmware.api [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Task: {'id': task-1947527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204398} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.967838] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.967959] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.968184] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.968356] env[62405]: INFO nova.compute.manager [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1790.972016] env[62405]: DEBUG oslo.service.loopingcall [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.972016] env[62405]: DEBUG nova.compute.manager [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1790.972016] env[62405]: DEBUG nova.network.neutron [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1791.045386] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Successfully updated port: 09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.326119] env[62405]: INFO nova.compute.claims [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1791.335514] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.335514] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.335514] env[62405]: DEBUG nova.network.neutron [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1791.395907] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947525, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.537077] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-plugged-aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1791.537311] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.537525] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.537692] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.537859] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] No waiting events found dispatching network-vif-plugged-aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1791.538031] env[62405]: WARNING nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received unexpected event network-vif-plugged-aa6cc405-9347-42f7-8532-fbf0538c5ed8 for instance with vm_state building and task_state spawning. [ 1791.538198] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-changed-aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1791.538351] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing instance network info cache due to event network-changed-aa6cc405-9347-42f7-8532-fbf0538c5ed8. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1791.538529] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Acquiring lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.538707] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Acquired lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.538885] env[62405]: DEBUG nova.network.neutron [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing network info cache for port aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1791.548097] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.548233] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.548487] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1791.743998] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Successfully updated port: e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.745734] env[62405]: DEBUG nova.network.neutron [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.838640] env[62405]: INFO nova.compute.resource_tracker [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating resource usage from migration f4e1eabb-c8ee-4e3c-b80a-8f1b540ce872 [ 1791.896763] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947525, 'name': ReconfigVM_Task, 'duration_secs': 1.652572} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.899362] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 48554024-9b6f-44be-b21e-615b25cd790c/48554024-9b6f-44be-b21e-615b25cd790c.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.899953] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa3fd96e-a127-4411-a5cc-709faab97e5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.908389] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1791.908389] env[62405]: value = "task-1947528" [ 1791.908389] env[62405]: _type = "Task" [ 1791.908389] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.916615] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947528, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.080395] env[62405]: DEBUG nova.network.neutron [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1792.123847] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1792.176215] env[62405]: DEBUG nova.network.neutron [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.250727] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.250727] env[62405]: INFO nova.compute.manager [-] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Took 1.28 seconds to deallocate network for instance. 
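The entries above follow the oslo_vmware task pattern that recurs throughout this log: an Invoking ... _Task call returns a task reference (value = "task-NNNNNNN", _type = "Task"), and the caller then polls it, logging progress percentages until it completes. The snippet below is only a minimal, self-contained sketch of that polling loop under simulated progress values; wait_for_task and get_task_progress here are hypothetical stand-ins, not the actual oslo_vmware API.

    import itertools
    import time

    # Simulated progress values; in the real driver these would come from
    # vCenter via the PropertyCollector, which this sketch does not talk to.
    _FAKE_PROGRESS = itertools.chain([0, 33, 66, 99], itertools.repeat(100))

    def get_task_progress(task_ref):
        """Hypothetical helper returning the task's progress percentage."""
        return next(_FAKE_PROGRESS)

    def wait_for_task(task_ref, interval=0.5, timeout=60.0):
        """Poll a task reference until it reports 100% or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            progress = get_task_progress(task_ref)
            print(f"Task {{'id': {task_ref!r}}} progress is {progress}%.")
            if progress >= 100:
                return
            time.sleep(interval)
        raise TimeoutError(f"{task_ref} did not complete within {timeout}s")

    wait_for_task("task-1947525")
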
[ 1792.395432] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92c7093-bc92-4fdb-9c6c-5bb2998332b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.403066] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397ba7d0-0d59-4018-a505-562b29e643c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.435583] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b1a260-8e58-46a2-a199-d38c450e4ec1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.440987] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947528, 'name': Rename_Task, 'duration_secs': 0.275704} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.441614] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1792.441871] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81e8e2e8-f862-46a1-8126-7cae9ffb422b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.446800] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2384a770-f10e-4492-b576-918bd7ffe6c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.451349] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1792.451349] env[62405]: value = "task-1947529" [ 1792.451349] env[62405]: _type = "Task" [ 1792.451349] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.461684] env[62405]: DEBUG nova.compute.provider_tree [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.470553] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947529, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.544667] env[62405]: DEBUG nova.network.neutron [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.620346] env[62405]: DEBUG nova.network.neutron [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.678845] env[62405]: DEBUG oslo_concurrency.lockutils [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] Releasing lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.679134] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Received event network-vif-deleted-98b2004d-b7ad-4c97-bf77-8dbdb1077689 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1792.679324] env[62405]: INFO nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Neutron deleted interface 98b2004d-b7ad-4c97-bf77-8dbdb1077689; detaching it from the instance and deleting it from the info cache [ 1792.679502] env[62405]: DEBUG nova.network.neutron [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.680591] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.680736] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1792.761234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.962176] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947529, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.964720] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-changed-09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1792.964908] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing instance network info cache due to event network-changed-09308517-a17c-48d3-b01f-fed73b19adfd. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1792.965112] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.965850] env[62405]: DEBUG nova.scheduler.client.report [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1793.050991] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.123182] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.123536] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Instance network_info: |[{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1793.123838] env[62405]: DEBUG oslo_concurrency.lockutils 
[req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.124033] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing network info cache for port 09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.125719] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:17:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09308517-a17c-48d3-b01f-fed73b19adfd', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1793.134446] env[62405]: DEBUG oslo.service.loopingcall [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.135156] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1793.135399] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7b9839a-ef75-4c0d-8a6c-825dd0d99839 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.155464] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1793.155464] env[62405]: value = "task-1947530" [ 1793.155464] env[62405]: _type = "Task" [ 1793.155464] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.163264] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.185526] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4e8ad13-00c7-4880-8a2a-152b5004bc1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.198017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609396b7-16b3-49bb-9f53-ff203de67b8e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.228455] env[62405]: DEBUG nova.compute.manager [req-bc8636ac-78d7-4ad6-b978-84c7faa4d980 req-711d4fdf-e68f-483f-9ec6-5fc73db46c55 service nova] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Detach interface failed, port_id=98b2004d-b7ad-4c97-bf77-8dbdb1077689, reason: Instance f410acd2-f786-43bd-ad60-0a6248dedb1c could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1793.231138] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1793.462312] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947529, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.471633] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.651s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.471633] env[62405]: INFO nova.compute.manager [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Migrating [ 1793.485467] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.212s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.487068] env[62405]: INFO nova.compute.claims [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1793.666481] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.925618] env[62405]: DEBUG nova.network.neutron [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updating instance_info_cache with network_info: [{"id": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "address": "fa:16:3e:3f:a1:6a", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63dd773b-71", "ovs_interfaceid": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "address": "fa:16:3e:8b:f3:f3", "network": {"id": "03e71937-4950-4019-82cb-71009ce407c5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-130535576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6cc405-93", "ovs_interfaceid": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "address": "fa:16:3e:2b:69:c7", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", 
"external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e348a3-5e", "ovs_interfaceid": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.962989] env[62405]: DEBUG oslo_vmware.api [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947529, 'name': PowerOnVM_Task, 'duration_secs': 1.445219} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.963478] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1793.963703] env[62405]: INFO nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Took 9.96 seconds to spawn the instance on the hypervisor. [ 1793.963889] env[62405]: DEBUG nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1793.964921] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdbd074-71c1-420a-a1df-570c52383ea0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.998497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.998670] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.998842] env[62405]: DEBUG nova.network.neutron [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.113781] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updated VIF entry in instance network info cache for port 
09308517-a17c-48d3-b01f-fed73b19adfd. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1794.114223] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.165451] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.428656] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.429085] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance network_info: |[{"id": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "address": "fa:16:3e:3f:a1:6a", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63dd773b-71", "ovs_interfaceid": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "address": "fa:16:3e:8b:f3:f3", "network": {"id": "03e71937-4950-4019-82cb-71009ce407c5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-130535576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6cc405-93", "ovs_interfaceid": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "address": "fa:16:3e:2b:69:c7", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e348a3-5e", "ovs_interfaceid": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1794.429710] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:a1:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '76e60ff4-204c-4f48-bd0e-2d5fa0a812ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63dd773b-7125-4f8e-a520-2339dbdacbb2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:f3:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa6cc405-9347-42f7-8532-fbf0538c5ed8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:69:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'76e60ff4-204c-4f48-bd0e-2d5fa0a812ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9e348a3-5e47-451e-982d-38f1c834ac1b', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1794.440822] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Creating folder: Project (e0d6dfea772e432289163b14e9e341c1). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.441059] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-116fd4bd-8df5-4bb3-b778-28914fa14682 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.454181] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Created folder: Project (e0d6dfea772e432289163b14e9e341c1) in parent group-v401284. [ 1794.454395] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Creating folder: Instances. Parent ref: group-v401491. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.454657] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc12eb58-afcf-4ee9-9667-abed191243ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.464462] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Created folder: Instances in parent group-v401491. [ 1794.464698] env[62405]: DEBUG oslo.service.loopingcall [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1794.464880] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1794.465087] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec2d231c-a473-48a0-adc8-11ce94766882 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.490223] env[62405]: INFO nova.compute.manager [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Took 58.73 seconds to build instance. [ 1794.493499] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1794.493499] env[62405]: value = "task-1947533" [ 1794.493499] env[62405]: _type = "Task" [ 1794.493499] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.508133] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947533, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.568207] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db58ad0-120e-4838-b676-97037e3d9a24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.591477] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1794.617597] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.617882] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-plugged-e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1794.618142] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.618340] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.618516] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.618655] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] No waiting events found dispatching network-vif-plugged-e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1794.619290] env[62405]: WARNING nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received unexpected event 
network-vif-plugged-e9e348a3-5e47-451e-982d-38f1c834ac1b for instance with vm_state building and task_state spawning. [ 1794.619290] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-changed-e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1794.619290] env[62405]: DEBUG nova.compute.manager [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing instance network info cache due to event network-changed-e9e348a3-5e47-451e-982d-38f1c834ac1b. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1794.619590] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Acquiring lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.619590] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Acquired lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.619719] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Refreshing network info cache for port e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1794.672084] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.803758] env[62405]: DEBUG nova.network.neutron [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.916268] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eebf62a-e693-4e4e-99a5-01657826226a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.923945] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e40837-112a-42ec-92da-efd32a4cb7f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.955718] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e11c38-dfff-4666-9ff2-1c01b714cbc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.963742] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7af6cbb-c0b6-41ed-b11b-43d4114ae581 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.977656] env[62405]: DEBUG nova.compute.provider_tree [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.992506] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75a3bd9f-400c-47e3-ac75-17f4c55f5a62 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.734s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.003987] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947533, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.100635] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.100876] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28392035-0598-4433-8735-38cf6f5de7b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.108380] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1795.108380] env[62405]: value = "task-1947534" [ 1795.108380] env[62405]: _type = "Task" [ 1795.108380] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.118070] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.168787] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.306506] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.479019] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updated VIF entry in instance network info cache for port e9e348a3-5e47-451e-982d-38f1c834ac1b. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.479019] env[62405]: DEBUG nova.network.neutron [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updating instance_info_cache with network_info: [{"id": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "address": "fa:16:3e:3f:a1:6a", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.84", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", "segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63dd773b-71", "ovs_interfaceid": "63dd773b-7125-4f8e-a520-2339dbdacbb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "address": "fa:16:3e:8b:f3:f3", "network": {"id": "03e71937-4950-4019-82cb-71009ce407c5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-130535576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa6cc405-93", "ovs_interfaceid": "aa6cc405-9347-42f7-8532-fbf0538c5ed8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "address": "fa:16:3e:2b:69:c7", "network": {"id": "972ea379-57e0-405e-bbb0-d92b123b9c7c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626641140", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "76e60ff4-204c-4f48-bd0e-2d5fa0a812ef", "external-id": "nsx-vlan-transportzone-854", 
"segmentation_id": 854, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9e348a3-5e", "ovs_interfaceid": "e9e348a3-5e47-451e-982d-38f1c834ac1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.481090] env[62405]: DEBUG nova.scheduler.client.report [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1795.507354] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947533, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.619209] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.666937] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.980661] env[62405]: DEBUG oslo_concurrency.lockutils [req-7a7982bb-53e4-409b-8dfb-358f6b58f2f7 req-c06fe2b3-b1b1-4a30-8356-781c82696d5d service nova] Releasing lock "refresh_cache-153adb6e-5381-4e91-881e-8e566a16905a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.986606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.986784] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1795.989613] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.504s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.990817] env[62405]: INFO nova.compute.claims [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.005823] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947533, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.118766] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.167095] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.494831] env[62405]: DEBUG nova.compute.utils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1796.498914] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1796.498914] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1796.511031] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947533, 'name': CreateVM_Task, 'duration_secs': 1.800095} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.511031] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1796.511537] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.511700] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.512032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1796.512691] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9aef554-135e-43a4-b249-bd33022f9761 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.519484] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1796.519484] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525996d2-019b-facf-805f-e1c78c127a76" [ 1796.519484] env[62405]: _type = "Task" [ 1796.519484] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.527489] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525996d2-019b-facf-805f-e1c78c127a76, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.561224] env[62405]: DEBUG nova.policy [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1796.623972] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947534, 'name': PowerOffVM_Task, 'duration_secs': 1.369541} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.624683] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1796.625416] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1796.671266] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947530, 'name': CreateVM_Task, 'duration_secs': 3.409593} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.671456] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1796.672171] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.823289] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b05e5f-6d93-48a6-b7ed-bdb3205c0678 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.848851] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1797.000589] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1797.008262] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Successfully created port: 6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1797.028333] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525996d2-019b-facf-805f-e1c78c127a76, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.028649] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.030361] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1797.030361] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.030361] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.030361] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1797.030361] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.030361] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1797.030361] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1931e3e6-81cd-44da-9839-405a89ba2a78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.034688] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81ebb059-0c89-4c34-a04c-eec9ed24c9ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.042985] env[62405]: DEBUG oslo_vmware.api [None 
req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1797.042985] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52775e51-39a3-4d9d-a6d7-a0dbc4fbb01c" [ 1797.042985] env[62405]: _type = "Task" [ 1797.042985] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.044726] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1797.044905] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1797.053993] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fd4bbef-c825-41fa-b257-2cb6c0f1f550 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.061887] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1797.061887] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5297be66-fe37-3c84-5613-253e4462fc57" [ 1797.061887] env[62405]: _type = "Task" [ 1797.061887] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.067019] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52775e51-39a3-4d9d-a6d7-a0dbc4fbb01c, 'name': SearchDatastore_Task, 'duration_secs': 0.010489} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.069576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.069830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1797.070103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.079756] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5297be66-fe37-3c84-5613-253e4462fc57, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.084321] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fc8e1e0-10df-4812-9368-7d11616794f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.089118] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1797.089118] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c80e3-3fd1-9521-c923-9a18ca79efc6" [ 1797.089118] env[62405]: _type = "Task" [ 1797.089118] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.098225] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c80e3-3fd1-9521-c923-9a18ca79efc6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.135516] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1797.135803] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1797.135959] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1797.136163] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1797.136310] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1797.136507] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1797.137859] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1797.137859] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1797.137859] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 
tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1797.137859] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1797.137859] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1797.145358] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57a556a8-edcc-429b-82f8-72473a4f7267 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.161309] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1797.161309] env[62405]: value = "task-1947535" [ 1797.161309] env[62405]: _type = "Task" [ 1797.161309] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.169492] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947535, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.356990] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1797.356990] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-613119e5-f595-4fd5-a6a7-a10c0ff0b10f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.363666] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1797.363666] env[62405]: value = "task-1947536" [ 1797.363666] env[62405]: _type = "Task" [ 1797.363666] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.375057] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1797.375404] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1797.450605] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16fdf406-edab-4749-b7e3-d7a4901e4f42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.460231] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c08e37-3b2c-423b-8b27-1e67d0537832 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.506460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ca0dd1-0902-412b-9447-ea3266fcac52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.515597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44a0b64-1b05-4ff0-aea4-1b3f38e2d34d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.528702] env[62405]: DEBUG nova.compute.provider_tree [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.599839] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c80e3-3fd1-9521-c923-9a18ca79efc6, 'name': SearchDatastore_Task, 'duration_secs': 0.010645} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.599839] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.599839] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 153adb6e-5381-4e91-881e-8e566a16905a/153adb6e-5381-4e91-881e-8e566a16905a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1797.600030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.600116] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1797.600308] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fba713c2-5a82-4cde-84de-c268b2d0f800 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.602332] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-870b80fc-7c0f-401b-b55c-789f7a3a6d1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.609443] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1797.609443] env[62405]: value = "task-1947537" [ 1797.609443] env[62405]: _type = "Task" [ 1797.609443] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.613379] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1797.613659] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1797.615120] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff9394d-2b25-4415-a7d2-33bdd6fef529 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.622757] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.626953] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1797.626953] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230eba8-07f2-f5ca-3311-4d0cf7376b55" [ 1797.626953] env[62405]: _type = "Task" [ 1797.626953] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.632941] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230eba8-07f2-f5ca-3311-4d0cf7376b55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.671255] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947535, 'name': ReconfigVM_Task, 'duration_secs': 0.149792} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.671636] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1797.743578] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.743893] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.744092] env[62405]: DEBUG nova.compute.manager [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.745021] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0009ac38-7fea-4b6c-88e0-ec25e804d7e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.753821] env[62405]: DEBUG nova.compute.manager [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1797.754437] env[62405]: DEBUG nova.objects.instance [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'flavor' on Instance uuid 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1797.884546] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1797.884820] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1797.884980] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1797.885178] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1797.885325] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1797.885474] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1797.885680] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1797.885837] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1797.886013] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1797.886213] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1797.886438] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1797.892121] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01361f38-d8d4-45b3-a9f9-b1961c86c338 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.911802] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1797.911802] env[62405]: value = "task-1947538" [ 1797.911802] env[62405]: _type = "Task" [ 1797.911802] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.920651] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947538, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.018065] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1798.032416] env[62405]: DEBUG nova.scheduler.client.report [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1798.052620] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1798.052991] env[62405]: DEBUG 
nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.053232] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1798.053514] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.053692] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1798.053855] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1798.054089] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1798.054555] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1798.054825] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1798.055046] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1798.055248] env[62405]: DEBUG nova.virt.hardware [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1798.056609] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf06019a-0796-43bd-8860-3fc8d209806b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.067139] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e158b3a-0e41-479c-a40d-0a484d5fd29a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.119473] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947537, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.135826] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230eba8-07f2-f5ca-3311-4d0cf7376b55, 'name': SearchDatastore_Task, 'duration_secs': 0.009156} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.136793] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e17016b-bdac-461a-8b1e-1bdeb4f2e9b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.143322] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1798.143322] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522fa2ed-cff3-5a60-4c66-a6a98e731d08" [ 1798.143322] env[62405]: _type = "Task" [ 1798.143322] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.151962] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522fa2ed-cff3-5a60-4c66-a6a98e731d08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.178478] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1798.178832] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.178944] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1798.179083] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.179233] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1798.179383] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1798.179719] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1798.179965] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1798.180164] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 
tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1798.180336] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1798.180515] env[62405]: DEBUG nova.virt.hardware [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1798.186201] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1798.186548] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4b967b4-ccce-41cb-b332-f1b2e7a721db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.206028] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1798.206028] env[62405]: value = "task-1947539" [ 1798.206028] env[62405]: _type = "Task" [ 1798.206028] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.212709] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947539, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.421438] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947538, 'name': ReconfigVM_Task, 'duration_secs': 0.291248} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.421830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1798.541062] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.541062] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1798.541874] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.590s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.542360] env[62405]: DEBUG nova.objects.instance [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lazy-loading 'resources' on Instance uuid 46240f5b-c6ab-481b-b20c-80cc727a79f4 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.620543] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599853} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.620818] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 153adb6e-5381-4e91-881e-8e566a16905a/153adb6e-5381-4e91-881e-8e566a16905a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1798.621067] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1798.621329] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e16f0532-2419-4551-9a0b-c0b2f77f8521 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.628546] env[62405]: DEBUG nova.compute.manager [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Received event network-vif-plugged-6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1798.628756] env[62405]: DEBUG oslo_concurrency.lockutils [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] Acquiring lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.628961] env[62405]: DEBUG oslo_concurrency.lockutils [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.629146] env[62405]: DEBUG oslo_concurrency.lockutils [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.629314] env[62405]: DEBUG nova.compute.manager [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] No waiting events found dispatching network-vif-plugged-6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1798.629481] env[62405]: WARNING nova.compute.manager [req-468fd9b4-e342-4184-ad9a-4014df54f7ff req-bd923871-31cd-4b55-889b-8594b18e8508 service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Received unexpected event network-vif-plugged-6298bc3e-ce57-411b-ad57-d919400c0aa0 for instance with vm_state building and 
task_state spawning. [ 1798.631335] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1798.631335] env[62405]: value = "task-1947540" [ 1798.631335] env[62405]: _type = "Task" [ 1798.631335] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.640363] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947540, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.652317] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522fa2ed-cff3-5a60-4c66-a6a98e731d08, 'name': SearchDatastore_Task, 'duration_secs': 0.047529} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.652566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.652816] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9d97bf1d-6830-48b1-831b-bf2b52188f32/9d97bf1d-6830-48b1-831b-bf2b52188f32.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1798.653065] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8901908f-762c-48c4-b261-26f94c21f434 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.661094] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1798.661094] env[62405]: value = "task-1947541" [ 1798.661094] env[62405]: _type = "Task" [ 1798.661094] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.669774] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947541, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.714204] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947539, 'name': ReconfigVM_Task, 'duration_secs': 0.172049} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.714536] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1798.715357] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befaa224-ac05-4774-9713-610489531d70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.739467] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1798.739776] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03338df0-3ba1-4d0c-bec9-ae1ce758189a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.761779] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.762136] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1798.762136] env[62405]: value = "task-1947542" [ 1798.762136] env[62405]: _type = "Task" [ 1798.762136] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.762330] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c610c588-7bad-4afc-8361-338af6408c59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.774462] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947542, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.775761] env[62405]: DEBUG oslo_vmware.api [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1798.775761] env[62405]: value = "task-1947543" [ 1798.775761] env[62405]: _type = "Task" [ 1798.775761] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.783570] env[62405]: DEBUG oslo_vmware.api [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.859311] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Successfully updated port: 6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1798.929357] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1798.929667] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.929814] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1798.930065] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.930279] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1798.930444] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1798.930710] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1798.930923] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1798.931186] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1798.931362] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1798.931581] env[62405]: DEBUG nova.virt.hardware [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1798.937910] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfiguring VM instance instance-0000001a to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1798.938265] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ce431f9-b7a2-4b87-babb-34c674085b9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.961689] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1798.961689] env[62405]: value = "task-1947544" [ 1798.961689] env[62405]: _type = "Task" [ 1798.961689] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.974288] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947544, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.048616] env[62405]: DEBUG nova.compute.utils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.051242] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1799.051242] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.120675] env[62405]: DEBUG nova.policy [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5386353a4f6d41d0be6b056a129eb125', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521150d8f23f4f76a0c785481c99e897', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1799.141667] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078971} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.144902] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1799.146478] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd96fb9-4f30-4a16-b6ae-606cb5e5c2ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.179127] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 153adb6e-5381-4e91-881e-8e566a16905a/153adb6e-5381-4e91-881e-8e566a16905a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1799.186800] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d0bb479-18b6-444a-8791-d45b878ce346 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.209936] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947541, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.211804] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1799.211804] env[62405]: value = "task-1947545" [ 1799.211804] env[62405]: _type = "Task" [ 1799.211804] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.226097] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947545, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.275364] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947542, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.284282] env[62405]: DEBUG oslo_vmware.api [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947543, 'name': PowerOffVM_Task, 'duration_secs': 0.25283} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.284604] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.284838] env[62405]: DEBUG nova.compute.manager [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1799.285742] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3a8317-5340-4ec8-98ae-8b614dcf52a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.362181] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.362242] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.362387] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.472623] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947544, 'name': ReconfigVM_Task, 'duration_secs': 0.342951} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.473014] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfigured VM instance instance-0000001a to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1799.473953] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b7f070-fa1b-40b3-b310-ee02781b7857 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.500097] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1799.502923] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb79f9a9-0954-4c72-9558-a803bf18e759 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.521388] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1799.521388] env[62405]: value = "task-1947546" [ 1799.521388] env[62405]: _type = "Task" [ 1799.521388] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.532611] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947546, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.537397] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4b52ec-ca5f-4276-a63f-bd8e4f5f10b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.545674] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c02903-c4f3-4951-8d2f-0d6df9356af0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.576156] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1799.582024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2873739-f69a-4b23-8567-271dd53ff3de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.587927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a74bf41-aab8-40f1-986a-e1ae0a1b541c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.602448] env[62405]: DEBUG nova.compute.provider_tree [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.681634] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615686} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.682606] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9d97bf1d-6830-48b1-831b-bf2b52188f32/9d97bf1d-6830-48b1-831b-bf2b52188f32.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1799.682606] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1799.682846] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-354629b1-c828-4b60-bd04-bea5347ab92b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.689793] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1799.689793] env[62405]: value = "task-1947547" [ 1799.689793] env[62405]: _type = "Task" [ 1799.689793] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.698142] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947547, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.721307] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947545, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.773645] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947542, 'name': ReconfigVM_Task, 'duration_secs': 0.78512} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.773928] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a/a1a84837-deef-4ffc-8a47-4891bfc2c87a.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1799.774251] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1799.799023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6b982257-c546-4ea9-a5a0-9879a2b632bf tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.824480] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Successfully created port: dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1799.946267] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1800.032195] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947546, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.105868] env[62405]: DEBUG nova.scheduler.client.report [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1800.203656] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115771} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.206061] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1800.206926] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34579b29-ed3a-43f3-8820-16539f4ea496 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.235275] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 9d97bf1d-6830-48b1-831b-bf2b52188f32/9d97bf1d-6830-48b1-831b-bf2b52188f32.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1800.239852] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-303f99a3-bcc8-41ee-bb8b-417d2c140c91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.263247] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947545, 'name': ReconfigVM_Task, 'duration_secs': 0.893507} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.266100] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 153adb6e-5381-4e91-881e-8e566a16905a/153adb6e-5381-4e91-881e-8e566a16905a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1800.266100] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9cf4ac0-60e9-40d3-89cc-2a2040180328 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.267119] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1800.267119] env[62405]: value = "task-1947548" [ 1800.267119] env[62405]: _type = "Task" [ 1800.267119] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.271830] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1800.271830] env[62405]: value = "task-1947549" [ 1800.271830] env[62405]: _type = "Task" [ 1800.271830] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.278040] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947548, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.285774] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947549, 'name': Rename_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.286116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ecf639-a8b1-4990-b78b-b4e85332c73c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.308078] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76bff47-b18f-42bc-a0b2-18ab2dbe62d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.329240] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1800.333606] env[62405]: DEBUG nova.network.neutron [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Updating instance_info_cache with network_info: [{"id": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "address": "fa:16:3e:91:a1:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6298bc3e-ce", "ovs_interfaceid": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.532474] env[62405]: DEBUG oslo_vmware.api [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947546, 'name': ReconfigVM_Task, 'duration_secs': 0.823632} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.532791] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35/15218373-ffa5-49ce-b604-423b7fc5fb35.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1800.533110] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1800.587738] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1800.613110] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1800.613383] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.613574] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1800.613767] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.613919] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a 
tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1800.614079] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1800.614347] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1800.614541] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1800.614723] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1800.614890] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1800.615077] env[62405]: DEBUG nova.virt.hardware [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1800.615797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.618272] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ad3ea8-406a-43d3-a8a9-810cdeec5a90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.621134] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.203s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.622592] env[62405]: INFO nova.compute.claims [None 
req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1800.634029] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a9c0c0-7836-49d9-b46c-26c6dbbdebc2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.648528] env[62405]: INFO nova.scheduler.client.report [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Deleted allocations for instance 46240f5b-c6ab-481b-b20c-80cc727a79f4 [ 1800.659784] env[62405]: DEBUG nova.compute.manager [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Received event network-changed-6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1800.659784] env[62405]: DEBUG nova.compute.manager [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Refreshing instance network info cache due to event network-changed-6298bc3e-ce57-411b-ad57-d919400c0aa0. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1800.659784] env[62405]: DEBUG oslo_concurrency.lockutils [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] Acquiring lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.675027] env[62405]: DEBUG nova.objects.instance [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'flavor' on Instance uuid 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.778896] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947548, 'name': ReconfigVM_Task, 'duration_secs': 0.328346} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.779513] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 9d97bf1d-6830-48b1-831b-bf2b52188f32/9d97bf1d-6830-48b1-831b-bf2b52188f32.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1800.780139] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb8362d4-4895-4fe6-b198-757c1c473928 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.784100] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947549, 'name': Rename_Task, 'duration_secs': 0.183536} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.784639] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1800.784855] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f164ac1-23a0-4069-8f7d-6e5897f8e227 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.789533] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1800.789533] env[62405]: value = "task-1947550" [ 1800.789533] env[62405]: _type = "Task" [ 1800.789533] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.790633] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1800.790633] env[62405]: value = "task-1947551" [ 1800.790633] env[62405]: _type = "Task" [ 1800.790633] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.801410] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947550, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.804180] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.839849] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.839849] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Instance network_info: |[{"id": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "address": "fa:16:3e:91:a1:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6298bc3e-ce", "ovs_interfaceid": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1800.840138] env[62405]: DEBUG oslo_concurrency.lockutils [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] Acquired lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.840306] env[62405]: DEBUG nova.network.neutron [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Refreshing network info cache for port 6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.841582] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:a1:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6298bc3e-ce57-411b-ad57-d919400c0aa0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1800.849642] env[62405]: DEBUG oslo.service.loopingcall [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.852468] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1800.852988] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-765abcab-bc6e-4fc6-b52f-1823f5480b17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.874706] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1800.874706] env[62405]: value = "task-1947552" [ 1800.874706] env[62405]: _type = "Task" [ 1800.874706] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.883402] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947552, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.887324] env[62405]: DEBUG nova.network.neutron [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Port 64634a81-f1e1-4078-894a-2f4e8b56de13 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1801.040734] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f5595d-c797-4b09-baea-33fac86e8950 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.061856] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed04e2d-6a24-45be-9a94-febc4ee1f21a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.081845] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1801.097317] env[62405]: DEBUG nova.network.neutron [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Updated VIF entry in instance network info cache for port 6298bc3e-ce57-411b-ad57-d919400c0aa0. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.097694] env[62405]: DEBUG nova.network.neutron [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Updating instance_info_cache with network_info: [{"id": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "address": "fa:16:3e:91:a1:76", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6298bc3e-ce", "ovs_interfaceid": "6298bc3e-ce57-411b-ad57-d919400c0aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.159901] env[62405]: DEBUG oslo_concurrency.lockutils [None req-90f1c58c-06d1-4fd1-ae5d-5225a87d330f tempest-DeleteServersAdminTestJSON-2054241673 tempest-DeleteServersAdminTestJSON-2054241673-project-admin] Lock "46240f5b-c6ab-481b-b20c-80cc727a79f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.594s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.179729] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.179916] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquired lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.180103] env[62405]: DEBUG nova.network.neutron [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.180287] env[62405]: DEBUG nova.objects.instance [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'info_cache' on Instance uuid 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.304201] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947551, 'name': PowerOnVM_Task} progress is 74%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.307744] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947550, 'name': Rename_Task, 'duration_secs': 0.140009} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.308092] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.308381] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09d46d6f-f98c-463c-a732-0582b7cc7898 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.315134] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1801.315134] env[62405]: value = "task-1947553" [ 1801.315134] env[62405]: _type = "Task" [ 1801.315134] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.323390] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.385489] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947552, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.600916] env[62405]: DEBUG oslo_concurrency.lockutils [req-36de472a-5bb5-4f5f-a711-013fe1f2ef5c req-05fab915-ed61-4ad9-9a1a-967534c8edda service nova] Releasing lock "refresh_cache-ff8731d6-3c55-4ddc-aeb1-308d72313881" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.624753] env[62405]: DEBUG nova.network.neutron [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Port 7e786917-4e46-4359-899e-afc1456451ae binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1801.684085] env[62405]: DEBUG nova.objects.base [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Object Instance<7256b956-e41a-40ec-a687-a129a8bafcb6> lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1801.804879] env[62405]: DEBUG oslo_vmware.api [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947551, 'name': PowerOnVM_Task, 'duration_secs': 0.81976} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.807553] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1801.807786] env[62405]: INFO nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Took 15.28 seconds to spawn the instance on the hypervisor. [ 1801.807970] env[62405]: DEBUG nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1801.809033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6f58f2-8dd1-43d2-87b3-dbbf2b59fc7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.826226] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947553, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.885467] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947552, 'name': CreateVM_Task, 'duration_secs': 0.564523} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.885692] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1801.886430] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.886619] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.886942] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1801.887214] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b88d740-ca34-4a12-9ab2-8e4a0de9a6f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.892216] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1801.892216] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520791dd-04a9-9375-197f-b921e09b7dd0" [ 1801.892216] env[62405]: _type = "Task" [ 1801.892216] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.921256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.921465] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.921723] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.923153] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520791dd-04a9-9375-197f-b921e09b7dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.011339} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.923552] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.923788] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1801.924047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.924326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.924532] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1801.924805] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70cc9bb1-5218-42b6-b891-35d2ec8dbc95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.935712] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1801.936026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1801.936805] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c5f76f-0eea-4882-9ea2-37dd38bb38e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.946764] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1801.946764] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529eb3a2-d5d2-49dc-32db-f0b71cdfc62c" [ 1801.946764] env[62405]: _type = "Task" [ 1801.946764] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.956296] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529eb3a2-d5d2-49dc-32db-f0b71cdfc62c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.956466] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Successfully updated port: dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1802.112840] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f393ecc2-860a-4a4e-a733-d78510dd658c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.121752] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14570823-ebef-4ff2-a9b8-b8d4588ea8bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.159500] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f94e94-f8e5-4e27-8f37-958336bbe904 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.168637] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9a2924-4e15-47b9-91f2-6f4990675f80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.182687] env[62405]: DEBUG nova.compute.provider_tree [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.332641] env[62405]: INFO nova.compute.manager [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Took 62.36 seconds to build instance. 
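[editor's note] The repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%." and "... completed successfully" records above and below come from oslo.vmware's task-polling loop (wait_for_task / _poll_task), which repeatedly queries vCenter for the task state until it finishes. The following is only a minimal, self-contained sketch of that poll-until-complete pattern using the standard library; the real driver goes through oslo.vmware's VMwareAPISession and an oslo.service looping call, and the get_task_info callable here is a hypothetical stand-in for the vSphere task query.

# Illustrative sketch only: a stdlib analog of the polling pattern that
# produces the "progress is N%" / "completed successfully" lines above.
# `get_task_info` is a hypothetical callable, assumed to return a dict
# such as {'state': 'running', 'progress': 74} or {'state': 'success'}.
import logging
import time

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a remote task until it succeeds, fails, or times out."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            # Mirrors the "completed successfully" record with duration_secs.
            LOG.debug("Task %s completed successfully in %.3fs",
                      task_id, time.monotonic() - start)
            return info
        if state == 'error':
            raise TaskFailed(f"Task {task_id} failed: {info.get('error')}")
        # Mirrors the periodic "progress is N%." records.
        LOG.debug("Task %s progress is %s%%.", task_id, info.get('progress', 0))
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        time.sleep(interval)

A caller would pass the task identifier seen in the log (for example task-1947553) together with whatever function fetches the task's current state; each loop iteration corresponds to one _poll_task line in the log.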
[ 1802.337663] env[62405]: DEBUG oslo_vmware.api [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947553, 'name': PowerOnVM_Task, 'duration_secs': 0.712856} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.337663] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1802.337871] env[62405]: INFO nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Took 12.05 seconds to spawn the instance on the hypervisor. [ 1802.337951] env[62405]: DEBUG nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1802.339253] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a73049-87da-4af9-ba15-d8933d681785 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.454705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.454948] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.462527] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.462666] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.462812] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a 
tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.464071] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529eb3a2-d5d2-49dc-32db-f0b71cdfc62c, 'name': SearchDatastore_Task, 'duration_secs': 0.019296} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.465200] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f2fb1df-3765-4ed1-88c7-e9ee865b4720 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.473119] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1802.473119] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523169c3-f51d-4788-bf62-3bf2f0b9049c" [ 1802.473119] env[62405]: _type = "Task" [ 1802.473119] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.484138] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523169c3-f51d-4788-bf62-3bf2f0b9049c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.542766] env[62405]: DEBUG nova.network.neutron [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Updating instance_info_cache with network_info: [{"id": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "address": "fa:16:3e:6f:12:15", "network": {"id": "858b74e5-8ffc-4b81-833e-5eb423dbf510", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-895575651-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d28fa9475f2f4a149bf00ccc63e70e3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6bb60c9-20", "ovs_interfaceid": "a6bb60c9-208a-4c73-96e1-13626d7d1dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.674485] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.674801] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.675074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.686748] env[62405]: DEBUG nova.compute.manager [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1802.686961] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 
req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.687244] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.687434] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.687606] env[62405]: DEBUG nova.compute.manager [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] No waiting events found dispatching network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1802.687774] env[62405]: WARNING nova.compute.manager [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received unexpected event network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 for instance with vm_state building and task_state spawning. [ 1802.687936] env[62405]: DEBUG nova.compute.manager [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-changed-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1802.688123] env[62405]: DEBUG nova.compute.manager [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing instance network info cache due to event network-changed-dba92750-bf41-4683-b71d-128391ff29d0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1802.688306] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.689450] env[62405]: DEBUG nova.scheduler.client.report [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1802.834626] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9808dc0a-bf6c-4c1e-b3ec-5f367d47d3d7 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.837s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.857863] env[62405]: INFO nova.compute.manager [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Took 58.57 seconds to build instance. [ 1802.958335] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1802.986653] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523169c3-f51d-4788-bf62-3bf2f0b9049c, 'name': SearchDatastore_Task, 'duration_secs': 0.043501} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.986931] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.987203] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ff8731d6-3c55-4ddc-aeb1-308d72313881/ff8731d6-3c55-4ddc-aeb1-308d72313881.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1802.987460] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d87d74e7-d52f-498e-8f3e-fc9aebc83aaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.994289] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1802.994289] env[62405]: value = "task-1947554" [ 1802.994289] env[62405]: _type = "Task" [ 1802.994289] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.002731] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.003486] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1803.010172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.010356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.010529] env[62405]: DEBUG nova.network.neutron [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1803.045053] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Releasing lock "refresh_cache-7256b956-e41a-40ec-a687-a129a8bafcb6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.150995] env[62405]: DEBUG nova.network.neutron [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.194326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.194919] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1803.197892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.306s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.197892] env[62405]: DEBUG nova.objects.instance [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lazy-loading 'resources' on Instance uuid 9b21fa71-8a0e-446a-9492-59e2b068237c {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1803.360529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f97ac946-eca2-403a-b557-ed9118dab9c0 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.369s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.486658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.505510] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947554, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.653647] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.654031] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance network_info: |[{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1803.654376] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.654582] env[62405]: DEBUG nova.network.neutron [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing network info cache for port dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.655892] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:78:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dba92750-bf41-4683-b71d-128391ff29d0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1803.667652] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 
tempest-ServersNegativeTestJSON-1262554471-project-member] Creating folder: Project (521150d8f23f4f76a0c785481c99e897). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1803.674218] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa59f980-a04c-4c60-9f51-b3a7b86561a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.689083] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created folder: Project (521150d8f23f4f76a0c785481c99e897) in parent group-v401284. [ 1803.689083] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating folder: Instances. Parent ref: group-v401495. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1803.689083] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e450873-158d-4398-ae54-465bac108da2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.697915] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created folder: Instances in parent group-v401495. [ 1803.698191] env[62405]: DEBUG oslo.service.loopingcall [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1803.698384] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1803.698629] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cefc2dfa-978e-42d8-bece-72d29cea6a08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.717848] env[62405]: DEBUG nova.compute.utils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1803.722971] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1803.723175] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1803.730877] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1803.730877] env[62405]: value = "task-1947557" [ 1803.730877] env[62405]: _type = "Task" [ 1803.730877] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.741756] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947557, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.782908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.783201] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.783418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "153adb6e-5381-4e91-881e-8e566a16905a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.783605] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.783773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.789674] env[62405]: INFO nova.compute.manager [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Terminating instance [ 1803.798190] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.798190] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.798190] env[62405]: DEBUG nova.network.neutron [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1803.803849] env[62405]: DEBUG nova.policy [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6472af0b6f6240f297f7f137cde41929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb1da47e8b1a400fab7817d9e6b282ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1803.877849] env[62405]: DEBUG nova.network.neutron [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.004796] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.844022} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.007408] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ff8731d6-3c55-4ddc-aeb1-308d72313881/ff8731d6-3c55-4ddc-aeb1-308d72313881.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1804.007714] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1804.010904] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffee908c-ada3-44d8-b6f7-6a26ac5f1cdb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.016312] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1804.016312] env[62405]: value = "task-1947558" [ 1804.016312] env[62405]: _type = "Task" [ 1804.016312] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.028825] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947558, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.055502] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.055502] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3db04c17-5562-424a-94f1-ef38f833ab89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.065301] env[62405]: DEBUG oslo_vmware.api [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1804.065301] env[62405]: value = "task-1947559" [ 1804.065301] env[62405]: _type = "Task" [ 1804.065301] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.073704] env[62405]: DEBUG oslo_vmware.api [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947559, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.115220] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Successfully created port: e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1804.224167] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1804.245720] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947557, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.266016] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f868e2f-a258-42d0-835f-e01f4a888634 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.273270] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75320d42-eb6e-447e-a204-6a3bee055639 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.304756] env[62405]: DEBUG nova.compute.manager [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1804.305214] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1804.308458] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc4aba5-2cb7-4ba9-be8d-2b8affe2e998 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.311710] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3cad8a-fe5f-45f4-a58f-61da9108798e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.322270] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9d5ba1-6043-447c-85cb-6d6a224aa445 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.327058] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1804.327058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b5e2944-d5ab-4acf-8311-5a48cbde9d66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.341040] env[62405]: DEBUG nova.compute.provider_tree [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.344647] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1804.344647] env[62405]: value = "task-1947560" [ 1804.344647] env[62405]: _type = "Task" [ 1804.344647] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.355013] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947560, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.381675] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.510782] env[62405]: DEBUG nova.network.neutron [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updated VIF entry in instance network info cache for port dba92750-bf41-4683-b71d-128391ff29d0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.511443] env[62405]: DEBUG nova.network.neutron [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.527074] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068089} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.527452] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1804.528226] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fd2b79-8a1f-450f-adea-531255951ebd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.552117] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] ff8731d6-3c55-4ddc-aeb1-308d72313881/ff8731d6-3c55-4ddc-aeb1-308d72313881.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1804.555159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09dcbceb-661c-4cf7-9af3-fcd4ba0575d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.586888] env[62405]: DEBUG oslo_vmware.api [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947559, 'name': PowerOnVM_Task, 'duration_secs': 0.4152} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.588316] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1804.588543] env[62405]: DEBUG nova.compute.manager [None req-1bbc0e7c-e729-4e68-b84b-5fe70106549a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1804.588946] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1804.588946] env[62405]: value = "task-1947561" [ 1804.588946] env[62405]: _type = "Task" [ 1804.588946] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.589652] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81394920-1615-42dd-b5ef-da39d4f7c808 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.605335] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947561, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.614110] env[62405]: DEBUG nova.network.neutron [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.742638] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947557, 'name': CreateVM_Task, 'duration_secs': 0.531582} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.743283] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1804.744050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.744276] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.745166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1804.745465] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2141d72d-8883-40cb-9836-097867f18d7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.750301] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1804.750301] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52576a1b-94a1-05fa-c8be-a7cced74f0dd" [ 1804.750301] env[62405]: _type = "Task" [ 1804.750301] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.758664] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52576a1b-94a1-05fa-c8be-a7cced74f0dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.844649] env[62405]: DEBUG nova.scheduler.client.report [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1804.859730] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947560, 'name': PowerOffVM_Task, 'duration_secs': 0.297312} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.860077] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1804.860373] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1804.860705] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba1d0f69-c0c2-4cad-a468-737ff1eca477 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.916527] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefb275e-ef50-4daf-9b8a-ee2da596c394 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.923672] env[62405]: DEBUG nova.compute.manager [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-changed-09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1804.923874] env[62405]: DEBUG nova.compute.manager [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing instance network info cache due to event network-changed-09308517-a17c-48d3-b01f-fed73b19adfd. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1804.924122] env[62405]: DEBUG oslo_concurrency.lockutils [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.924275] env[62405]: DEBUG oslo_concurrency.lockutils [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.924681] env[62405]: DEBUG nova.network.neutron [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing network info cache for port 09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1804.945681] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d163a23e-abd6-49ad-9da1-8b0d57380512 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.955129] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1805.013903] env[62405]: DEBUG oslo_concurrency.lockutils [req-7abb036e-25a5-4534-99d3-8f5d11362768 req-c8305441-fdb7-4793-b092-9abcbcaeaf96 service nova] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.104152] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947561, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.116606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.235688] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1805.261711] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52576a1b-94a1-05fa-c8be-a7cced74f0dd, 'name': SearchDatastore_Task, 'duration_secs': 0.012741} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.263936] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.264265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1805.264556] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.264712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.264896] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1805.265414] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e4c30a4-bb3c-442b-8ec7-9bbba8bd08a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.275847] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='be70850392d497662ae156b52edc3b07',container_format='bare',created_at=2024-12-21T03:25:40Z,direct_url=,disk_format='vmdk',id=1e2c6626-50b1-4468-a3b7-982412fb92f3,min_disk=1,min_ram=0,name='tempest-test-snap-1743896204',owner='bb1da47e8b1a400fab7817d9e6b282ed',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-21T03:25:55Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1805.276101] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.276706] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1805.276706] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.276706] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1805.276892] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1805.276951] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1805.279112] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1805.279328] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1805.279512] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1805.279699] env[62405]: DEBUG nova.virt.hardware [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 
tempest-ImagesTestJSON-1176465240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1805.280600] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee70984-a2c3-4ac4-8ea3-199cb0a523c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.284605] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1805.284703] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1805.285745] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6f30c72-f512-40a5-98b8-258de52764f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.291108] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0958292-29ff-4468-acec-6dc6b6d2007c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.296044] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1805.296044] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52befe1f-1875-3d5c-4b59-324bc310fd2d" [ 1805.296044] env[62405]: _type = "Task" [ 1805.296044] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.311663] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52befe1f-1875-3d5c-4b59-324bc310fd2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.353918] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.356349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.518s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.356702] env[62405]: DEBUG nova.objects.instance [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lazy-loading 'resources' on Instance uuid 777ddb84-25b9-4da6-be6b-a2289dbf510a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1805.369711] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1805.369941] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1805.370160] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleting the datastore file [datastore1] 153adb6e-5381-4e91-881e-8e566a16905a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1805.370735] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5fc3450-50b5-46ee-87e9-27682dfbeaff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.377852] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1805.377852] env[62405]: value = "task-1947563" [ 1805.377852] env[62405]: _type = "Task" [ 1805.377852] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.383864] env[62405]: INFO nova.scheduler.client.report [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Deleted allocations for instance 9b21fa71-8a0e-446a-9492-59e2b068237c [ 1805.390460] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.463759] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1805.464353] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d961cd18-886a-4d44-86cf-7d24e4f6327d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.474120] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1805.474120] env[62405]: value = "task-1947564" [ 1805.474120] env[62405]: _type = "Task" [ 1805.474120] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.482154] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.602189] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947561, 'name': ReconfigVM_Task, 'duration_secs': 0.776377} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.602570] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Reconfigured VM instance instance-00000047 to attach disk [datastore1] ff8731d6-3c55-4ddc-aeb1-308d72313881/ff8731d6-3c55-4ddc-aeb1-308d72313881.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1805.603248] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33394e34-3497-4e13-8a3a-150502b50aa5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.610777] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1805.610777] env[62405]: value = "task-1947565" [ 1805.610777] env[62405]: _type = "Task" [ 1805.610777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.619366] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947565, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.659957] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d672a4-da88-47fd-8ac0-d93c82161f32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.682559] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8d5273-24c0-49d8-b4ca-16385d837a34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.690830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1805.808645] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52befe1f-1875-3d5c-4b59-324bc310fd2d, 'name': SearchDatastore_Task, 'duration_secs': 0.056159} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.809378] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15413114-c024-46b0-bc0c-6a2f5597fe63 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.814614] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1805.814614] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525e7854-d61a-3679-5c78-93062703dd1b" [ 1805.814614] env[62405]: _type = "Task" [ 1805.814614] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.815367] env[62405]: DEBUG nova.network.neutron [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updated VIF entry in instance network info cache for port 09308517-a17c-48d3-b01f-fed73b19adfd. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1805.815698] env[62405]: DEBUG nova.network.neutron [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.826051] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525e7854-d61a-3679-5c78-93062703dd1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.854952] env[62405]: DEBUG nova.compute.manager [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Received event network-vif-plugged-e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1805.855196] env[62405]: DEBUG oslo_concurrency.lockutils [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] Acquiring lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.855405] env[62405]: DEBUG oslo_concurrency.lockutils [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.855573] env[62405]: DEBUG oslo_concurrency.lockutils [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.855743] env[62405]: DEBUG nova.compute.manager [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] No waiting events found dispatching network-vif-plugged-e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1805.855907] env[62405]: WARNING nova.compute.manager [req-6b5c090b-4d09-453f-9eff-84c374c71239 req-9f4ebd82-4ff7-4cc3-a6ad-8f54ae1ac816 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Received unexpected event network-vif-plugged-e4459292-2e66-49b3-bca3-94dc7cd7afbc for instance with vm_state building and task_state spawning. [ 1805.876594] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Successfully updated port: e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1805.888247] env[62405]: DEBUG oslo_vmware.api [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397009} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.891553] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1805.891741] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1805.891909] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1805.892091] env[62405]: INFO nova.compute.manager [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Took 1.59 seconds to destroy the instance on the hypervisor. [ 1805.892323] env[62405]: DEBUG oslo.service.loopingcall [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.895024] env[62405]: DEBUG nova.compute.manager [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1805.895125] env[62405]: DEBUG nova.network.neutron [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1805.897429] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7aa1a96d-62c9-4103-852f-07c7e30ac76e tempest-InstanceActionsV221TestJSON-920398123 tempest-InstanceActionsV221TestJSON-920398123-project-member] Lock "9b21fa71-8a0e-446a-9492-59e2b068237c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.484s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.987455] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947564, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.121109] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947565, 'name': Rename_Task, 'duration_secs': 0.1872} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.124711] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1806.125299] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d0c1c01-4392-4968-8876-3a0b60f17234 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.137807] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1806.137807] env[62405]: value = "task-1947566" [ 1806.137807] env[62405]: _type = "Task" [ 1806.137807] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.144829] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.199642] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e2337ee4-aebc-4368-a415-d3997201c53c tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance '15218373-ffa5-49ce-b604-423b7fc5fb35' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1806.321906] env[62405]: DEBUG oslo_concurrency.lockutils [req-41b55bee-dcaf-458d-a2d5-f148f97c6801 req-77978a7e-0fac-4973-a4ed-2ba33272706d service nova] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.328391] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525e7854-d61a-3679-5c78-93062703dd1b, 'name': SearchDatastore_Task, 'duration_secs': 0.015417} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.328651] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.328914] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1806.329190] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b0844ff-7c61-4316-9b59-eaa88aabfca1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.336437] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1806.336437] env[62405]: value = "task-1947567" [ 1806.336437] env[62405]: _type = "Task" [ 1806.336437] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.346032] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947567, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.379188] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.379388] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.379559] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1806.442417] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fdf67f-1b89-476c-8c6c-404669440e65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.451544] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b3dd0b-22d3-4630-95ba-d5b932e53a79 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.490345] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d774d5c5-88e4-4f61-916a-0511a9757cd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.506333] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dad0cc-9143-4a00-bb68-376b26e15f31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.510603] env[62405]: DEBUG oslo_vmware.api [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947564, 'name': PowerOnVM_Task, 'duration_secs': 0.597205} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.510936] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1806.511167] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f76991-3d7f-4093-aa61-e98012271936 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance 'a1a84837-deef-4ffc-8a47-4891bfc2c87a' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1806.526889] env[62405]: DEBUG nova.compute.provider_tree [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1806.649370] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947566, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.848948] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947567, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.918887] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1807.029545] env[62405]: DEBUG nova.scheduler.client.report [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1807.112890] env[62405]: DEBUG nova.network.neutron [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Updating instance_info_cache with network_info: [{"id": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "address": "fa:16:3e:ad:75:22", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4459292-2e", "ovs_interfaceid": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.142673] env[62405]: DEBUG nova.network.neutron [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.144470] env[62405]: DEBUG oslo_vmware.api [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947566, 'name': PowerOnVM_Task, 'duration_secs': 0.869483} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.144951] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1807.145246] env[62405]: INFO nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1807.145471] env[62405]: DEBUG nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1807.146257] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6676114-8a23-4d9f-bc17-46846f57beb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.353702] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667382} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.354034] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1807.354791] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1807.355104] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3100158b-d510-4f73-855f-71f55f695dd9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.362430] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1807.362430] env[62405]: value = "task-1947568" [ 1807.362430] env[62405]: _type = "Task" [ 1807.362430] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.372726] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947568, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.539448] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.183s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.544844] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.459s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.545168] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.549637] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.789s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.551277] env[62405]: INFO nova.compute.claims [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1807.581291] env[62405]: INFO nova.scheduler.client.report [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Deleted allocations for instance 777ddb84-25b9-4da6-be6b-a2289dbf510a [ 1807.589853] env[62405]: INFO nova.scheduler.client.report [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleted allocations for instance 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3 [ 1807.616718] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.617068] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 
tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Instance network_info: |[{"id": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "address": "fa:16:3e:ad:75:22", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4459292-2e", "ovs_interfaceid": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1807.617752] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:75:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4459292-2e66-49b3-bca3-94dc7cd7afbc', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1807.626601] env[62405]: DEBUG oslo.service.loopingcall [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.626943] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1807.627621] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-629cf622-12ee-4182-ad2d-86f95b02f27d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.646609] env[62405]: INFO nova.compute.manager [-] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Took 1.75 seconds to deallocate network for instance. [ 1807.662490] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1807.662490] env[62405]: value = "task-1947569" [ 1807.662490] env[62405]: _type = "Task" [ 1807.662490] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.673521] env[62405]: INFO nova.compute.manager [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Took 55.47 seconds to build instance. [ 1807.681239] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947569, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.873164] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.320497} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.873652] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1807.877023] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591f648e-3e99-44a1-aef1-1dd0faf7d173 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.899815] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.900745] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00fa34eb-9cb6-4881-92f2-9b142a70c1ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.925383] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1807.925383] env[62405]: value = "task-1947570" [ 1807.925383] env[62405]: _type = "Task" [ 1807.925383] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.934957] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947570, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.948158] env[62405]: DEBUG nova.compute.manager [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Received event network-changed-e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1807.948158] env[62405]: DEBUG nova.compute.manager [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Refreshing instance network info cache due to event network-changed-e4459292-2e66-49b3-bca3-94dc7cd7afbc. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1807.948158] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] Acquiring lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.948158] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] Acquired lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.948158] env[62405]: DEBUG nova.network.neutron [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Refreshing network info cache for port e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1808.092747] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a884c06d-da5b-4163-bf48-4001a6f63e46 tempest-VolumesAdminNegativeTest-1547043413 tempest-VolumesAdminNegativeTest-1547043413-project-member] Lock "777ddb84-25b9-4da6-be6b-a2289dbf510a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.790s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.096785] env[62405]: DEBUG oslo_concurrency.lockutils [None req-77b553dc-cca2-4316-818c-c6ea50255526 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "3c9487ff-2092-4cde-82d5-b38e5bc5c6e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.740s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.153259] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.174044] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947569, 'name': CreateVM_Task, 'duration_secs': 0.364635} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.175019] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1808.175885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-294126c4-5d2d-44d2-8a1f-1cf3250c34ce tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.929s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.176658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.176823] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.177228] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1808.177799] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84bc9cec-a785-4b32-bcdf-e13d956e4517 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.182666] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1808.182666] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52db936b-967f-e1b5-cc49-9be51ffedca4" [ 1808.182666] env[62405]: _type = "Task" [ 1808.182666] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.192260] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.192501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.192683] env[62405]: DEBUG nova.compute.manager [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Going to confirm migration 4 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1808.194169] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52db936b-967f-e1b5-cc49-9be51ffedca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.253124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "48554024-9b6f-44be-b21e-615b25cd790c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.253352] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.253566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "48554024-9b6f-44be-b21e-615b25cd790c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.253750] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.253918] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.256871] env[62405]: INFO nova.compute.manager [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Terminating instance [ 1808.436317] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.698949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.699440] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Processing image 1e2c6626-50b1-4468-a3b7-982412fb92f3 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1808.699723] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.699972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.700604] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1808.702314] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1094e249-e2c9-4458-90e0-e23484267642 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.716417] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 
tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1808.716634] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1808.717412] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbc2f753-1f0f-4541-8753-5b60220774cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.727145] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1808.727145] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ef3a18-65ed-6fcc-e2d5-b8d9b9d8101e" [ 1808.727145] env[62405]: _type = "Task" [ 1808.727145] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.736719] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ef3a18-65ed-6fcc-e2d5-b8d9b9d8101e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.746993] env[62405]: DEBUG nova.network.neutron [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Updated VIF entry in instance network info cache for port e4459292-2e66-49b3-bca3-94dc7cd7afbc. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1808.746993] env[62405]: DEBUG nova.network.neutron [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Updating instance_info_cache with network_info: [{"id": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "address": "fa:16:3e:ad:75:22", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4459292-2e", "ovs_interfaceid": "e4459292-2e66-49b3-bca3-94dc7cd7afbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.761082] env[62405]: DEBUG nova.compute.manager [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1808.761317] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1808.762698] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd4f584-4050-4e3f-9947-498f7f8d59ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.771407] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1808.774891] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd1b2415-3a92-421d-8f4f-bd13e58dc224 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.782062] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1808.782062] env[62405]: value = "task-1947571" [ 1808.782062] env[62405]: _type = "Task" [ 1808.782062] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.789532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.789532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.789664] env[62405]: DEBUG nova.network.neutron [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1808.791241] env[62405]: DEBUG nova.objects.instance [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'info_cache' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.798488] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947571, 'name': PowerOffVM_Task} 
progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.949993] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947570, 'name': ReconfigVM_Task, 'duration_secs': 0.676471} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.949993] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Reconfigured VM instance instance-00000048 to attach disk [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.950871] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-960bf28c-64c6-4d60-a6b5-9291670c659e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.960181] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1808.960181] env[62405]: value = "task-1947572" [ 1808.960181] env[62405]: _type = "Task" [ 1808.960181] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.972043] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947572, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.115836] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ade5498-bdba-4ea3-b791-d80646300df9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.120711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.121371] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.121850] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.122100] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.122277] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.126831] env[62405]: INFO nova.compute.manager [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Terminating instance [ 1809.133679] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1e1bb6-4aa0-4fd4-9a6d-a5efebc1c76a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.176404] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90f0bef-12c8-4fdf-a492-901ede5c1648 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.186690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7246fe88-115b-4a3a-a54a-454adbc7bf8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.203850] env[62405]: DEBUG nova.compute.provider_tree [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.237685] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1809.237981] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Fetch image to [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4/OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1809.238181] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Downloading stream optimized image 1e2c6626-50b1-4468-a3b7-982412fb92f3 to [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4/OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4.vmdk on the data store datastore1 as vApp {{(pid=62405) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1809.238364] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Downloading image file data 1e2c6626-50b1-4468-a3b7-982412fb92f3 to the ESX as VM named 'OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4' {{(pid=62405) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1809.251058] env[62405]: DEBUG oslo_concurrency.lockutils [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] Releasing lock "refresh_cache-2ab5f28c-1f71-4bea-8733-523e5570f5c6" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.251290] env[62405]: DEBUG nova.compute.manager [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-deleted-aa6cc405-9347-42f7-8532-fbf0538c5ed8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1809.251471] env[62405]: DEBUG nova.compute.manager [req-c9e61dd2-c10b-4204-b925-242a8096109b req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-deleted-e9e348a3-5e47-451e-982d-38f1c834ac1b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1809.251643] env[62405]: DEBUG nova.compute.manager [req-c9e61dd2-c10b-4204-b925-242a8096109b 
req-324f9178-aee2-4bc4-a1e0-0e7887dd8be8 service nova] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Received event network-vif-deleted-63dd773b-7125-4f8e-a520-2339dbdacbb2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1809.299011] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947571, 'name': PowerOffVM_Task, 'duration_secs': 0.343032} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.301279] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.301462] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1809.301921] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-284390f5-274e-4f8c-8633-a740d1739082 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.337015] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1809.337015] env[62405]: value = "resgroup-9" [ 1809.337015] env[62405]: _type = "ResourcePool" [ 1809.337015] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1809.337366] env[62405]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a587f06f-3036-47a3-99e5-59ef92b1917b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.361352] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease: (returnval){ [ 1809.361352] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1809.361352] env[62405]: _type = "HttpNfcLease" [ 1809.361352] env[62405]: } obtained for vApp import into resource pool (val){ [ 1809.361352] env[62405]: value = "resgroup-9" [ 1809.361352] env[62405]: _type = "ResourcePool" [ 1809.361352] env[62405]: }. 
{{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1809.361646] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the lease: (returnval){ [ 1809.361646] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1809.361646] env[62405]: _type = "HttpNfcLease" [ 1809.361646] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1809.368719] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1809.368719] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1809.368719] env[62405]: _type = "HttpNfcLease" [ 1809.368719] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1809.446892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.447241] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.447471] env[62405]: DEBUG nova.compute.manager [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Going to confirm migration 3 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1809.463024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1809.463024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1809.463024] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleting the datastore file [datastore1] 48554024-9b6f-44be-b21e-615b25cd790c {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1809.466098] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-8106cd11-8332-42a9-b907-7779db0dd372 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.473782] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947572, 'name': Rename_Task, 'duration_secs': 0.198745} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.475292] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1809.475636] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1809.475636] env[62405]: value = "task-1947575" [ 1809.475636] env[62405]: _type = "Task" [ 1809.475636] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.476307] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ec27db0-27c6-40fd-b7a8-b4baa175ce3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.490162] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.492529] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1809.492529] env[62405]: value = "task-1947576" [ 1809.492529] env[62405]: _type = "Task" [ 1809.492529] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.502359] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.639085] env[62405]: DEBUG nova.compute.manager [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1809.639365] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1809.640384] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab12888-2dbd-4259-9d4c-181beb39f1a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.648795] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.652131] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c130e28-ac30-4a79-9a0a-eb2cee1b271b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.659081] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1809.659081] env[62405]: value = "task-1947577" [ 1809.659081] env[62405]: _type = "Task" [ 1809.659081] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.667720] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.710012] env[62405]: DEBUG nova.scheduler.client.report [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1809.869706] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1809.869706] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1809.869706] env[62405]: _type = "HttpNfcLease" [ 1809.869706] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1809.986454] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.986765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.987910] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "058682a1-5240-4414-9203-c612ecd12999-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.988398] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.988398] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.991504] env[62405]: INFO nova.compute.manager [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Terminating instance [ 1810.002929] env[62405]: DEBUG oslo_vmware.api [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244285} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.008261] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1810.008261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1810.008261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1810.008261] env[62405]: INFO nova.compute.manager [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1810.008261] env[62405]: DEBUG oslo.service.loopingcall [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.008261] env[62405]: DEBUG nova.compute.manager [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1810.008261] env[62405]: DEBUG nova.network.neutron [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1810.013887] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947576, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.176719] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947577, 'name': PowerOffVM_Task, 'duration_secs': 0.19094} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.177290] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1810.177600] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1810.177950] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-363205e8-b769-4477-90e0-8cbf392249b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.189417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.189417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.189596] env[62405]: DEBUG nova.network.neutron [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1810.189884] env[62405]: DEBUG nova.objects.instance [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'info_cache' on Instance uuid a1a84837-deef-4ffc-8a47-4891bfc2c87a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1810.216051] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.216689] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1810.219504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.370s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.223147] env[62405]: INFO nova.compute.claims [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1810.288091] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1810.288327] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1810.288507] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] ff8731d6-3c55-4ddc-aeb1-308d72313881 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1810.289081] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22ee54df-4d9d-44cb-a7d7-63dbf0e3cddf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.296047] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1810.296047] env[62405]: value = "task-1947579" [ 1810.296047] env[62405]: _type = "Task" [ 1810.296047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.307058] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.370948] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1810.370948] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1810.370948] env[62405]: _type = "HttpNfcLease" [ 1810.370948] env[62405]: } is ready. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1810.371960] env[62405]: DEBUG nova.network.neutron [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.373271] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1810.373271] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52918245-18c8-3258-53a8-0f20b9489b66" [ 1810.373271] env[62405]: _type = "HttpNfcLease" [ 1810.373271] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1810.377027] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3410d5-6754-49b2-bdc2-e442532c86e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.382607] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1810.382784] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk. 
{{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1810.453313] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0d2ec0e3-aaab-46a2-93d0-331b67b9a1c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.506861] env[62405]: DEBUG oslo_vmware.api [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947576, 'name': PowerOnVM_Task, 'duration_secs': 0.687091} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.506861] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.506861] env[62405]: INFO nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Took 9.92 seconds to spawn the instance on the hypervisor. [ 1810.506861] env[62405]: DEBUG nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1810.507730] env[62405]: DEBUG nova.compute.manager [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1810.507915] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1810.508814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf354da-b0c0-4f59-a842-e70f2e03563f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.512934] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821909a2-88a2-4b29-9011-e2af76fda2a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.525017] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1810.525498] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c71c356-823a-41b5-92c2-247d226b89a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.533270] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1810.533270] env[62405]: value = "task-1947580" [ 1810.533270] env[62405]: _type = "Task" [ 1810.533270] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.545898] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947580, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.737501] env[62405]: DEBUG nova.compute.utils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1810.742155] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1810.742376] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1810.791321] env[62405]: DEBUG nova.compute.manager [req-1ce8c9f1-fe24-465e-a4d2-8ac177359b9c req-b80b1d86-4dc4-4da8-b192-df06a5875493 service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Received event network-vif-deleted-483a3097-a1d8-4e46-8db6-09591eb7e3d4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1810.791532] env[62405]: INFO nova.compute.manager [req-1ce8c9f1-fe24-465e-a4d2-8ac177359b9c req-b80b1d86-4dc4-4da8-b192-df06a5875493 service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Neutron deleted interface 483a3097-a1d8-4e46-8db6-09591eb7e3d4; detaching it from the instance and deleting it from the info cache [ 1810.791710] env[62405]: DEBUG nova.network.neutron [req-1ce8c9f1-fe24-465e-a4d2-8ac177359b9c req-b80b1d86-4dc4-4da8-b192-df06a5875493 service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.808571] env[62405]: DEBUG oslo_vmware.api [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312073} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.811118] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1810.811491] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1810.811593] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1810.811712] env[62405]: INFO nova.compute.manager [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1810.812245] env[62405]: DEBUG oslo.service.loopingcall [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.813139] env[62405]: DEBUG nova.compute.manager [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1810.813256] env[62405]: DEBUG nova.network.neutron [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1810.816768] env[62405]: DEBUG nova.policy [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1810.875853] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.876143] env[62405]: DEBUG nova.objects.instance [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'migration_context' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1811.048574] env[62405]: INFO nova.compute.manager [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Took 48.58 seconds to build instance. [ 1811.049675] env[62405]: DEBUG nova.network.neutron [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.056962] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947580, 'name': PowerOffVM_Task, 'duration_secs': 0.247212} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.060184] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1811.060184] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1811.060399] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00688d8d-ff5c-4900-91d9-5ba1e9aab15a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.140447] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1811.140447] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1811.140447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24e676e-5c57-43c1-8b7b-5df6468541a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.149042] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1811.149424] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1811.149772] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0bee3bd5-bc5a-449d-a33d-1b5e826c2aa2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.186341] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1811.186341] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1811.186341] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleting the datastore file [datastore1] 058682a1-5240-4414-9203-c612ecd12999 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1811.186341] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ea8dd15-87b8-47bf-a550-28ccaba4cf55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.195860] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for the task: (returnval){ [ 1811.195860] env[62405]: value = "task-1947582" [ 1811.195860] env[62405]: _type = "Task" [ 1811.195860] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.205806] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.251026] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1811.296831] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e80ddbf-fd43-43da-bb59-91b14ac130a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.317089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46074a32-1cca-4c5d-904d-02a7134f3c25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.332274] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Successfully created port: bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1811.359547] env[62405]: DEBUG nova.compute.manager [req-1ce8c9f1-fe24-465e-a4d2-8ac177359b9c req-b80b1d86-4dc4-4da8-b192-df06a5875493 service nova] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Detach interface failed, port_id=483a3097-a1d8-4e46-8db6-09591eb7e3d4, reason: Instance 48554024-9b6f-44be-b21e-615b25cd790c could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1811.381994] env[62405]: DEBUG nova.objects.base [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Object Instance<15218373-ffa5-49ce-b604-423b7fc5fb35> lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1811.383412] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517ec629-e922-475e-92b8-9f06f74b4432 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.405648] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e34dbfd-6886-42e7-99f4-679131ee0535 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.414202] env[62405]: DEBUG oslo_vmware.api [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1811.414202] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5288c67f-f81d-8067-398f-0eea3bbbc9ea" [ 1811.414202] env[62405]: _type = "Task" [ 1811.414202] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.420814] env[62405]: DEBUG oslo_vmware.api [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5288c67f-f81d-8067-398f-0eea3bbbc9ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.452328] env[62405]: DEBUG oslo_vmware.rw_handles [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52521dac-b166-ba2a-f4ee-3b4d26d53534/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1811.452808] env[62405]: INFO nova.virt.vmwareapi.images [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Downloaded image file data 1e2c6626-50b1-4468-a3b7-982412fb92f3 [ 1811.455786] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a70ce95-590c-4d47-a493-71a5d72dfd1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.481973] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-053e081c-64c7-4737-9a2c-5df14ff1d7d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.533032] env[62405]: INFO nova.virt.vmwareapi.images [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] The imported VM was unregistered [ 1811.533032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1811.533032] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.533597] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df37a4e6-6022-4abd-bea6-c497766f386a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.544334] env[62405]: DEBUG nova.compute.manager [req-c367044e-9ddc-48d9-9a6d-225357c4cbca req-64fd61e1-883f-4cd0-a534-5b6c2dc9dc7a service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Received event network-vif-deleted-6298bc3e-ce57-411b-ad57-d919400c0aa0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1811.544895] env[62405]: INFO nova.compute.manager [req-c367044e-9ddc-48d9-9a6d-225357c4cbca req-64fd61e1-883f-4cd0-a534-5b6c2dc9dc7a service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Neutron deleted interface 6298bc3e-ce57-411b-ad57-d919400c0aa0; detaching it from the instance and deleting it from the info cache [ 1811.545302] env[62405]: DEBUG nova.network.neutron [req-c367044e-9ddc-48d9-9a6d-225357c4cbca req-64fd61e1-883f-4cd0-a534-5b6c2dc9dc7a service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Updating instance_info_cache with network_info: [] {{(pid=62405) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.554023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5cd5d350-1ef5-4c4e-ae11-6fce19180a1a tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.740s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.554023] env[62405]: INFO nova.compute.manager [-] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Took 1.55 seconds to deallocate network for instance. [ 1811.559984] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.560330] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4/OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4.vmdk to [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk. {{(pid=62405) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1811.564352] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-798b2420-4d2e-444b-89ff-69aec884c7a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.578185] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1811.578185] env[62405]: value = "task-1947584" [ 1811.578185] env[62405]: _type = "Task" [ 1811.578185] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.595539] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.706649] env[62405]: DEBUG oslo_vmware.api [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Task: {'id': task-1947582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313628} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.706993] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1811.707215] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1811.707825] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1811.707825] env[62405]: INFO nova.compute.manager [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] [instance: 058682a1-5240-4414-9203-c612ecd12999] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1811.708018] env[62405]: DEBUG oslo.service.loopingcall [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.708381] env[62405]: DEBUG nova.compute.manager [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1811.708502] env[62405]: DEBUG nova.network.neutron [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1811.723381] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209b4f87-0c5c-4af0-8b4b-58c4d0bb7f11 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.731208] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b666c25-a8f0-427b-abde-c8afedea5724 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.772906] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c365aead-f831-4a38-975d-6b7c1718381c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.782782] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301a67f5-02e1-414e-92a2-e540492db0ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.799169] env[62405]: DEBUG nova.compute.provider_tree [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 
tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.885678] env[62405]: DEBUG nova.network.neutron [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [{"id": "64634a81-f1e1-4078-894a-2f4e8b56de13", "address": "fa:16:3e:e0:c1:e2", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64634a81-f1", "ovs_interfaceid": "64634a81-f1e1-4078-894a-2f4e8b56de13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.921860] env[62405]: DEBUG oslo_vmware.api [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5288c67f-f81d-8067-398f-0eea3bbbc9ea, 'name': SearchDatastore_Task, 'duration_secs': 0.019237} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.922215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.013531] env[62405]: DEBUG nova.network.neutron [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.047630] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79cf7629-d834-47cf-af3f-b3f93485c48e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.058202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acd3daa-3e59-468c-8d41-62e59d1a0bfe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.069970] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.094383] env[62405]: DEBUG nova.compute.manager [req-c367044e-9ddc-48d9-9a6d-225357c4cbca req-64fd61e1-883f-4cd0-a534-5b6c2dc9dc7a service nova] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Detach interface failed, port_id=6298bc3e-ce57-411b-ad57-d919400c0aa0, reason: Instance ff8731d6-3c55-4ddc-aeb1-308d72313881 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1812.104638] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.280686] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1812.307683] env[62405]: DEBUG nova.scheduler.client.report [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1812.326342] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1812.326891] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1812.327206] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1812.327563] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1812.327846] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1812.328111] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1812.328597] env[62405]: 
DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1812.328904] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1812.329200] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1812.330599] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1812.330599] env[62405]: DEBUG nova.virt.hardware [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1812.331049] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d81345-ec4a-4d4b-ad20-05fb42027926 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.340479] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853f414c-1157-4954-acf7-bc6aef03fbc0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.396478] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-a1a84837-deef-4ffc-8a47-4891bfc2c87a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.396478] env[62405]: DEBUG nova.objects.instance [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'migration_context' on Instance uuid a1a84837-deef-4ffc-8a47-4891bfc2c87a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1812.514827] env[62405]: INFO nova.compute.manager [-] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Took 1.70 seconds to deallocate network for instance. [ 1812.591495] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 26%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.705795] env[62405]: DEBUG nova.network.neutron [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.820223] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.820223] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1812.822662] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.832s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.824177] env[62405]: INFO nova.compute.claims [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1812.829658] env[62405]: DEBUG nova.compute.manager [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Received event network-vif-plugged-bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1812.829658] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] Acquiring lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.829658] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.829658] env[62405]: DEBUG oslo_concurrency.lockutils [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.829804] env[62405]: 
DEBUG nova.compute.manager [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] No waiting events found dispatching network-vif-plugged-bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1812.830249] env[62405]: WARNING nova.compute.manager [req-b6e968e8-5dc3-4223-953a-7741173757fe req-b254f137-1812-4a7e-b931-e30978414e48 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Received unexpected event network-vif-plugged-bfaa42fd-a41b-4fd9-a12f-6b8599602de7 for instance with vm_state building and task_state spawning. [ 1812.896446] env[62405]: DEBUG nova.objects.base [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1812.898597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19020fb-6d62-4e14-90ac-a0d531db37a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.928381] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Successfully updated port: bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1812.930559] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db427196-2053-465a-8208-79f0b01dde68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.937051] env[62405]: DEBUG oslo_vmware.api [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1812.937051] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f25061-473b-d186-abd7-196d95d20420" [ 1812.937051] env[62405]: _type = "Task" [ 1812.937051] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.950026] env[62405]: DEBUG oslo_vmware.api [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f25061-473b-d186-abd7-196d95d20420, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.025366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.074152] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.074858] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.092205] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.208177] env[62405]: INFO nova.compute.manager [-] [instance: 058682a1-5240-4414-9203-c612ecd12999] Took 1.50 seconds to deallocate network for instance. [ 1813.333720] env[62405]: DEBUG nova.compute.utils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1813.335185] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1813.335390] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1813.382836] env[62405]: DEBUG nova.policy [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '766cbfd2f4944dc5b4bb3c210c4c6a95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a949e89f885745acb15d0afd4893ce68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1813.433969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.433969] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.433969] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1813.450522] env[62405]: DEBUG oslo_vmware.api [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f25061-473b-d186-abd7-196d95d20420, 'name': SearchDatastore_Task, 'duration_secs': 0.04784} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.451841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.579595] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1813.593702] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.717861] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.726202] env[62405]: DEBUG nova.compute.manager [req-4c2d059a-01b7-47ab-837a-766bf6ff7a5a req-4fb4d5b3-4bae-4c2a-964a-3334f8f7a6d5 service nova] [instance: 058682a1-5240-4414-9203-c612ecd12999] Received event network-vif-deleted-f2f99aa3-770a-41cb-bb49-775f9f0f2708 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1813.839280] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Successfully created port: a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1813.841384] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1813.973746] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1814.101771] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.106313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.170748] env[62405]: DEBUG nova.network.neutron [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Updating instance_info_cache with network_info: [{"id": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "address": "fa:16:3e:0d:c8:4f", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfaa42fd-a4", "ovs_interfaceid": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.292895] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dccb619-843c-4adc-8837-75811122a9af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.303061] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a9ecbf-fb8a-474d-bcc4-e99819679113 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.341628] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b009247-dcbc-4ecf-b97f-47ae8aa6516e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.354465] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48628ab4-f459-4e36-803c-d022f4f7ad65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.368995] env[62405]: DEBUG nova.compute.provider_tree [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.597017] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.675072] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.675445] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Instance network_info: |[{"id": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "address": "fa:16:3e:0d:c8:4f", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfaa42fd-a4", "ovs_interfaceid": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1814.675871] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:c8:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfaa42fd-a41b-4fd9-a12f-6b8599602de7', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1814.690068] env[62405]: DEBUG oslo.service.loopingcall [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.690337] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1814.690593] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1531604a-6fc5-41a5-97c9-a2dfa9002fd4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.722327] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1814.722327] env[62405]: value = "task-1947585" [ 1814.722327] env[62405]: _type = "Task" [ 1814.722327] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.731041] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947585, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.851851] env[62405]: DEBUG nova.compute.manager [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Received event network-changed-bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1814.852063] env[62405]: DEBUG nova.compute.manager [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Refreshing instance network info cache due to event network-changed-bfaa42fd-a41b-4fd9-a12f-6b8599602de7. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1814.852287] env[62405]: DEBUG oslo_concurrency.lockutils [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] Acquiring lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.852433] env[62405]: DEBUG oslo_concurrency.lockutils [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] Acquired lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.852591] env[62405]: DEBUG nova.network.neutron [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Refreshing network info cache for port bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1814.854799] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1814.871418] env[62405]: DEBUG nova.scheduler.client.report [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1814.882947] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1814.883193] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1814.883352] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1814.883531] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1814.883697] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1814.883908] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1814.884136] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1814.884300] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1814.884474] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1814.884647] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1814.884824] env[62405]: DEBUG nova.virt.hardware [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1814.885680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fb0c6f-ef3b-4508-88dd-d2b60cd979db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.894596] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b1ef49-d164-4289-9a97-539ce4b100cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.097324] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947584, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.052983} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.097597] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4/OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4.vmdk to [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk. 
[ 1815.097817] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Cleaning up location [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1815.097999] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f7033711-fc46-4f36-92b1-274dc0d1aee4 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1815.098268] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-713200bb-de3b-4cb8-a26d-73f1bc9e152f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.104307] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1815.104307] env[62405]: value = "task-1947586" [ 1815.104307] env[62405]: _type = "Task" [ 1815.104307] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.113875] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947586, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.232061] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947585, 'name': CreateVM_Task, 'duration_secs': 0.471215} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.232237] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1815.232933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.233142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.233461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1815.233702] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b66470b-8e24-4825-870f-c77a199a2205 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.238232] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1815.238232] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dfa72e-b562-9160-f07f-f1558fb45420" [ 1815.238232] env[62405]: _type = "Task" [ 1815.238232] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.245990] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dfa72e-b562-9160-f07f-f1558fb45420, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.377169] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.377693] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1815.380999] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.871s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.383284] env[62405]: INFO nova.compute.claims [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1815.578943] env[62405]: DEBUG nova.network.neutron [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Updated VIF entry in instance network info cache for port bfaa42fd-a41b-4fd9-a12f-6b8599602de7. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1815.579387] env[62405]: DEBUG nova.network.neutron [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Updating instance_info_cache with network_info: [{"id": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "address": "fa:16:3e:0d:c8:4f", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfaa42fd-a4", "ovs_interfaceid": "bfaa42fd-a41b-4fd9-a12f-6b8599602de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.618219] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037179} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.619017] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1815.619614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.620011] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk to [datastore1] 2ab5f28c-1f71-4bea-8733-523e5570f5c6/2ab5f28c-1f71-4bea-8733-523e5570f5c6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1815.620404] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a53b0630-1a13-4e65-8cd3-c8632f63d675 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.631032] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1815.631032] env[62405]: value = "task-1947587" [ 1815.631032] env[62405]: _type = "Task" [ 1815.631032] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.637934] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.750314] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52dfa72e-b562-9160-f07f-f1558fb45420, 'name': SearchDatastore_Task, 'duration_secs': 0.009493} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.750314] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.750314] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1815.750314] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.750314] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.750314] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.750314] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94bbdb39-33a3-4cb2-8b1c-6a50b5326eb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.766163] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.766715] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1815.767548] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cab135c7-ca5c-48b9-a6cf-835196c03557 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.772751] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1815.772751] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5272d7b9-d355-adea-12a5-573738a8bb7b" [ 1815.772751] env[62405]: _type = "Task" [ 1815.772751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.780650] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5272d7b9-d355-adea-12a5-573738a8bb7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.889029] env[62405]: DEBUG nova.compute.utils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1815.889029] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1815.889029] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1815.930063] env[62405]: DEBUG nova.policy [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f465915a21943b58ddfe2d0d5816fbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '774aaaffb55b401eae1c919aa2f45675', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1816.082464] env[62405]: DEBUG oslo_concurrency.lockutils [req-2b07a109-f0cf-431e-83dd-672d92c5c1ff req-fc25a459-e4b9-4af2-807c-9606e5b77142 service nova] Releasing lock "refresh_cache-af174cbf-3555-42b0-bacd-033f9ff46f08" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.142843] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.159626] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Successfully updated port: a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1816.203248] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Successfully created port: 556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1816.283636] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5272d7b9-d355-adea-12a5-573738a8bb7b, 'name': SearchDatastore_Task, 'duration_secs': 0.025958} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.284281] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53de3191-32ae-4d8f-8333-b557e6263b39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.289790] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1816.289790] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52656c7b-a925-73f9-1279-12760696f618" [ 1816.289790] env[62405]: _type = "Task" [ 1816.289790] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.297687] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52656c7b-a925-73f9-1279-12760696f618, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.393016] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1816.638900] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.661716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.662016] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.662112] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1816.804403] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52656c7b-a925-73f9-1279-12760696f618, 'name': SearchDatastore_Task, 'duration_secs': 0.044354} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.804887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.805273] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] af174cbf-3555-42b0-bacd-033f9ff46f08/af174cbf-3555-42b0-bacd-033f9ff46f08.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1816.805700] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-505b87bb-6314-4421-b71c-6166825f5410 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.817027] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1816.817027] env[62405]: value = "task-1947588" [ 1816.817027] env[62405]: _type = "Task" [ 1816.817027] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.824174] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.874018] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a02815-105e-44e6-ac35-fa89b024d138 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.882788] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab24861-4576-42a6-b684-8e8850afb47a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.923798] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938c165d-53cc-421d-93bf-686952f8fa83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.939777] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6956e20-2eb8-40ec-a0fc-5f00fca3eb76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.968032] env[62405]: DEBUG nova.compute.provider_tree [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.979070] env[62405]: DEBUG nova.compute.manager [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Received event network-vif-plugged-a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1816.979070] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Acquiring lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.979070] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.979070] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.979070] env[62405]: 
DEBUG nova.compute.manager [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] No waiting events found dispatching network-vif-plugged-a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1816.979070] env[62405]: WARNING nova.compute.manager [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Received unexpected event network-vif-plugged-a051e2aa-2501-4f7a-82b2-25f0988776c6 for instance with vm_state building and task_state spawning. [ 1816.979070] env[62405]: DEBUG nova.compute.manager [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Received event network-changed-a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1816.979426] env[62405]: DEBUG nova.compute.manager [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Refreshing instance network info cache due to event network-changed-a051e2aa-2501-4f7a-82b2-25f0988776c6. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1816.979799] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Acquiring lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.142779] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.251954] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1817.329423] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.424906] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1817.458135] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1817.458821] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1817.459148] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1817.459439] env[62405]: DEBUG 
nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1817.459714] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1817.459997] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1817.461601] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73850cf-0141-431b-927b-9b96e7a80589 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.470616] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f2aa11-757c-4756-9f4c-799951762df6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.488965] env[62405]: DEBUG nova.scheduler.client.report [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1817.555874] env[62405]: DEBUG nova.network.neutron [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Updating instance_info_cache with network_info: [{"id": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "address": "fa:16:3e:fa:e2:64", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapa051e2aa-25", "ovs_interfaceid": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.642223] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.657188] env[62405]: DEBUG nova.compute.manager [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Received event network-vif-plugged-556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1817.657188] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] Acquiring lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.657188] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.657188] env[62405]: DEBUG oslo_concurrency.lockutils [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.657188] env[62405]: DEBUG nova.compute.manager [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] No waiting events found dispatching network-vif-plugged-556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1817.657188] env[62405]: WARNING nova.compute.manager [req-2d72c66a-12d9-47d3-9b2e-916b94c20f7a req-927b6329-4891-419b-be44-7f4cc2d2c651 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Received unexpected event network-vif-plugged-556c9e65-2036-4d78-9b15-857b8261e634 for instance with vm_state building and task_state spawning. 
[ 1817.796945] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Successfully updated port: 556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1817.827219] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.993983] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.994532] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1817.997779] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.237s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.998340] env[62405]: DEBUG nova.objects.instance [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lazy-loading 'resources' on Instance uuid f410acd2-f786-43bd-ad60-0a6248dedb1c {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1818.060252] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.060733] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Instance network_info: |[{"id": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "address": "fa:16:3e:fa:e2:64", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa051e2aa-25", "ovs_interfaceid": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1818.064021] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Acquired lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.064021] env[62405]: DEBUG nova.network.neutron [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Refreshing network info cache for port a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1818.064021] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:e2:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf63c3c8-d774-4b81-9b12-848612a96076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a051e2aa-2501-4f7a-82b2-25f0988776c6', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.071799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Creating folder: Project (a949e89f885745acb15d0afd4893ce68). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1818.075634] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bfa8754-90bc-4cda-b262-4970f06af67b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.088768] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Created folder: Project (a949e89f885745acb15d0afd4893ce68) in parent group-v401284. [ 1818.088768] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Creating folder: Instances. Parent ref: group-v401501. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1818.089138] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06c4bf8b-92aa-4830-90a1-62ea75b42235 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.099868] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Created folder: Instances in parent group-v401501. [ 1818.100166] env[62405]: DEBUG oslo.service.loopingcall [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.100432] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1818.100659] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa551861-a173-4e65-87c2-7dffd9731e0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.128828] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1818.128828] env[62405]: value = "task-1947591" [ 1818.128828] env[62405]: _type = "Task" [ 1818.128828] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.145678] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947591, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.145678] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.301344] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.302873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.302873] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1818.330540] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.450430] env[62405]: DEBUG nova.network.neutron [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Updated VIF entry in instance network info cache for port a051e2aa-2501-4f7a-82b2-25f0988776c6. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.451290] env[62405]: DEBUG nova.network.neutron [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Updating instance_info_cache with network_info: [{"id": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "address": "fa:16:3e:fa:e2:64", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa051e2aa-25", "ovs_interfaceid": "a051e2aa-2501-4f7a-82b2-25f0988776c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.503517] env[62405]: DEBUG nova.compute.utils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1818.509631] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1818.509631] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1818.562873] env[62405]: DEBUG nova.policy [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f465915a21943b58ddfe2d0d5816fbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '774aaaffb55b401eae1c919aa2f45675', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1818.644950] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.650884] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947591, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.831369] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.846092] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1818.889973] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Successfully created port: ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1818.955650] env[62405]: DEBUG oslo_concurrency.lockutils [req-efc0e828-31e6-40a1-afb5-4e57b43f8336 req-40ce2476-aefe-4683-9e7c-0749e4170ba7 service nova] Releasing lock "refresh_cache-d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.009514] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1819.035346] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Updating instance_info_cache with network_info: [{"id": "556c9e65-2036-4d78-9b15-857b8261e634", "address": "fa:16:3e:a0:7f:66", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap556c9e65-20", "ovs_interfaceid": "556c9e65-2036-4d78-9b15-857b8261e634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.037168] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3d4bcd-0477-4b91-bfb5-dfa350d984c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.044885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51913142-a976-4266-9d3d-7c51f923b198 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.075954] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f80db-b77f-4751-bd7a-d6aa7821118a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.086157] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae72961-c797-4023-a0a0-7c15e7a54792 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.100305] env[62405]: DEBUG nova.compute.provider_tree [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.142641] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947587, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.20025} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.146194] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1e2c6626-50b1-4468-a3b7-982412fb92f3/1e2c6626-50b1-4468-a3b7-982412fb92f3.vmdk to [datastore1] 2ab5f28c-1f71-4bea-8733-523e5570f5c6/2ab5f28c-1f71-4bea-8733-523e5570f5c6.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1819.146194] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947591, 'name': CreateVM_Task, 'duration_secs': 0.5839} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.146839] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e3c096-c163-4b03-b701-6da7d8e5bca6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.149440] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1819.150140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.150307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.150607] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1819.151187] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c20a456-7b3d-4c17-b018-0806b367d124 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.171196] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 2ab5f28c-1f71-4bea-8733-523e5570f5c6/2ab5f28c-1f71-4bea-8733-523e5570f5c6.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1819.173063] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-13709b29-c98b-489a-a810-2d4756bdae9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.191127] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1819.191127] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525bc6a6-a8e6-7f96-1c81-e3f91698e6e4" [ 1819.191127] env[62405]: _type = "Task" [ 1819.191127] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.200687] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1819.200687] env[62405]: value = "task-1947592" [ 1819.200687] env[62405]: _type = "Task" [ 1819.200687] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.202178] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525bc6a6-a8e6-7f96-1c81-e3f91698e6e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.211568] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947592, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.326257] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "65cd4af4-30cf-4435-8f32-501db450905f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.326484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.332967] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.540599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.540911] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Instance network_info: |[{"id": "556c9e65-2036-4d78-9b15-857b8261e634", "address": "fa:16:3e:a0:7f:66", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap556c9e65-20", "ovs_interfaceid": "556c9e65-2036-4d78-9b15-857b8261e634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1819.541344] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:7f:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '556c9e65-2036-4d78-9b15-857b8261e634', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1819.549256] env[62405]: DEBUG oslo.service.loopingcall [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1819.549473] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1819.549694] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35e56c4e-6695-4001-bfa2-833396f0b499 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.570978] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1819.570978] env[62405]: value = "task-1947593" [ 1819.570978] env[62405]: _type = "Task" [ 1819.570978] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.579278] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947593, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.603545] env[62405]: DEBUG nova.scheduler.client.report [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1819.686326] env[62405]: DEBUG nova.compute.manager [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Received event network-changed-556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1819.686573] env[62405]: DEBUG nova.compute.manager [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Refreshing instance network info cache due to event network-changed-556c9e65-2036-4d78-9b15-857b8261e634. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1819.686754] env[62405]: DEBUG oslo_concurrency.lockutils [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] Acquiring lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.686905] env[62405]: DEBUG oslo_concurrency.lockutils [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] Acquired lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.687116] env[62405]: DEBUG nova.network.neutron [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Refreshing network info cache for port 556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1819.703224] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525bc6a6-a8e6-7f96-1c81-e3f91698e6e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.713992] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947592, 'name': ReconfigVM_Task, 'duration_secs': 0.288633} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.713992] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 2ab5f28c-1f71-4bea-8733-523e5570f5c6/2ab5f28c-1f71-4bea-8733-523e5570f5c6.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1819.713992] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37e7a421-4cc0-4dc8-9ad8-2cb872679759 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.720065] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1819.720065] env[62405]: value = "task-1947594" [ 1819.720065] env[62405]: _type = "Task" [ 1819.720065] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.728075] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947594, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.833040] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1819.837023] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.024895] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1820.049454] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1820.049691] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1820.049850] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1820.050044] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1820.050194] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
1820.050338] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1820.050542] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1820.050702] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1820.050871] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1820.051045] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1820.051224] env[62405]: DEBUG nova.virt.hardware [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1820.052059] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eab6ff-8440-4c9a-9e57-c845f84d2247 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.060233] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ee61e5-4655-4c36-82d6-025c1920a144 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.081205] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947593, 'name': CreateVM_Task, 'duration_secs': 0.365031} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.081369] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1820.082046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.108282] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.110764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.624s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.112478] env[62405]: INFO nova.compute.claims [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.128122] env[62405]: INFO nova.scheduler.client.report [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Deleted allocations for instance f410acd2-f786-43bd-ad60-0a6248dedb1c [ 1820.203907] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525bc6a6-a8e6-7f96-1c81-e3f91698e6e4, 'name': SearchDatastore_Task, 'duration_secs': 0.590993} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.204150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.204388] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1820.204797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.205026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.205138] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1820.205343] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.205648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1820.206104] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-813190d7-0ac8-4dc6-8721-b5baf4bd012f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.207882] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-272fbd56-d8e1-426a-8068-f64956c96c31 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.214686] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1820.214686] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b1b9f-dabd-7fe5-62cf-950a7ab18837" [ 1820.214686] env[62405]: _type = "Task" [ 1820.214686] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.222535] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b1b9f-dabd-7fe5-62cf-950a7ab18837, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.229893] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947594, 'name': Rename_Task, 'duration_secs': 0.182661} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.230166] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1820.230395] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4acb2d92-dde2-4875-84bb-64d29677600c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.236901] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1820.236901] env[62405]: value = "task-1947595" [ 1820.236901] env[62405]: _type = "Task" [ 1820.236901] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.248175] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.332006] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.341816] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1820.341816] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1820.345442] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a284297-a9e3-49ca-81d1-f14c73c971f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.355275] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1820.355275] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f5bbb9-9353-22c5-e048-0a2796e25ce0" [ 1820.355275] env[62405]: _type = "Task" [ 1820.355275] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.364166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.369309] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f5bbb9-9353-22c5-e048-0a2796e25ce0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.497612] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Successfully updated port: ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1820.636464] env[62405]: DEBUG oslo_concurrency.lockutils [None req-95031a91-3c8c-4837-a4d7-da95100f0fbb tempest-ServerMetadataNegativeTestJSON-1628476971 tempest-ServerMetadataNegativeTestJSON-1628476971-project-member] Lock "f410acd2-f786-43bd-ad60-0a6248dedb1c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.863s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.725921] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b1b9f-dabd-7fe5-62cf-950a7ab18837, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.727976] env[62405]: DEBUG nova.network.neutron [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Updated VIF entry in instance network info cache for port 556c9e65-2036-4d78-9b15-857b8261e634. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1820.728344] env[62405]: DEBUG nova.network.neutron [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Updating instance_info_cache with network_info: [{"id": "556c9e65-2036-4d78-9b15-857b8261e634", "address": "fa:16:3e:a0:7f:66", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap556c9e65-20", "ovs_interfaceid": "556c9e65-2036-4d78-9b15-857b8261e634", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.748875] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947595, 'name': PowerOnVM_Task} 
progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.832957] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.870387] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f5bbb9-9353-22c5-e048-0a2796e25ce0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.002765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.002953] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.003135] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1821.226972] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b1b9f-dabd-7fe5-62cf-950a7ab18837, 'name': SearchDatastore_Task, 'duration_secs': 0.656498} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.229325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.229561] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1821.229773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.230733] env[62405]: DEBUG oslo_concurrency.lockutils [req-96348860-a153-439e-b3ef-b1c552889b24 req-3e4210fb-59c3-4323-8300-284a9a4a0987 service nova] Releasing lock "refresh_cache-00158b10-4292-48f3-85a0-991af1dbc5f1" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.247558] env[62405]: DEBUG oslo_vmware.api [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947595, 'name': PowerOnVM_Task, 'duration_secs': 0.53073} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.247837] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1821.248063] env[62405]: INFO nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Took 16.01 seconds to spawn the instance on the hypervisor. 
[ 1821.248253] env[62405]: DEBUG nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1821.249077] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7de923-e0ba-4c78-9a61-a7f7fab06495 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.335045] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947588, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.068034} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.335657] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] af174cbf-3555-42b0-bacd-033f9ff46f08/af174cbf-3555-42b0-bacd-033f9ff46f08.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1821.335657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1821.335848] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc307f13-77ed-4e38-a762-f17e9eae5cc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.344020] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1821.344020] env[62405]: value = "task-1947596" [ 1821.344020] env[62405]: _type = "Task" [ 1821.344020] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.351578] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.367820] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f5bbb9-9353-22c5-e048-0a2796e25ce0, 'name': SearchDatastore_Task, 'duration_secs': 0.519978} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.372036] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d39048-f000-4ff4-8c4b-4058dc6f7eca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.377031] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1821.377031] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d0263-f4b5-9e9c-b04a-b9c78b12a458" [ 1821.377031] env[62405]: _type = "Task" [ 1821.377031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.387486] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d0263-f4b5-9e9c-b04a-b9c78b12a458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.502900] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd79bb1-6b83-4db0-ac16-0ddfb92ab141 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.512142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049c895d-4010-42ca-91eb-adc6139d598a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.541852] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a53f21-29ea-4afe-b2e9-68dc4291e6ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.550766] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa040e0e-ce15-414c-9497-980069a7fb9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.563916] env[62405]: DEBUG nova.compute.provider_tree [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1821.663384] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1821.769691] env[62405]: INFO nova.compute.manager [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Took 54.37 seconds to build instance. 
[ 1821.852219] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066138} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.852498] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1821.853310] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61164b3-330c-4dfb-a598-95c14438ca1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.885445] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] af174cbf-3555-42b0-bacd-033f9ff46f08/af174cbf-3555-42b0-bacd-033f9ff46f08.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1821.885445] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30ffc562-1cbc-4ab3-8018-0561679541e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.901392] env[62405]: DEBUG nova.network.neutron [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Updating instance_info_cache with network_info: [{"id": "ff6b512b-10d4-455e-85af-39beb8f916a2", "address": "fa:16:3e:a6:89:ba", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6b512b-10", "ovs_interfaceid": "ff6b512b-10d4-455e-85af-39beb8f916a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.908649] env[62405]: DEBUG nova.compute.manager [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service 
nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Received event network-vif-plugged-ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1821.908837] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Acquiring lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.909056] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.909229] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.909391] env[62405]: DEBUG nova.compute.manager [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] No waiting events found dispatching network-vif-plugged-ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1821.909548] env[62405]: WARNING nova.compute.manager [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Received unexpected event network-vif-plugged-ff6b512b-10d4-455e-85af-39beb8f916a2 for instance with vm_state building and task_state spawning. [ 1821.909739] env[62405]: DEBUG nova.compute.manager [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Received event network-changed-ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1821.909957] env[62405]: DEBUG nova.compute.manager [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Refreshing instance network info cache due to event network-changed-ff6b512b-10d4-455e-85af-39beb8f916a2. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1821.910011] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Acquiring lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.917227] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d0263-f4b5-9e9c-b04a-b9c78b12a458, 'name': SearchDatastore_Task, 'duration_secs': 0.009728} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.919121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.919398] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d/d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1821.919952] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1821.919952] env[62405]: value = "task-1947597" [ 1821.919952] env[62405]: _type = "Task" [ 1821.919952] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.920194] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.920372] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1821.920586] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1752dd9f-e63c-4871-ab86-617b9ab650c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.923562] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a962f12-5d52-4fd1-aad2-607120839b50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.937538] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947597, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.938705] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1821.938705] env[62405]: value = "task-1947598" [ 1821.938705] env[62405]: _type = "Task" [ 1821.938705] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.940037] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1821.940037] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1821.944261] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c8f41fa-7886-4d9b-a953-3ae74c1d27bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.950942] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947598, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.952276] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1821.952276] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d1ba6b-1304-dd1c-d3f7-49d14d7e2741" [ 1821.952276] env[62405]: _type = "Task" [ 1821.952276] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.965940] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d1ba6b-1304-dd1c-d3f7-49d14d7e2741, 'name': SearchDatastore_Task, 'duration_secs': 0.008945} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.966829] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c15ad33-2eac-4042-ba3c-15e78105cea5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.972596] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1821.972596] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b4499-b521-9853-ab36-449fc4deb803" [ 1821.972596] env[62405]: _type = "Task" [ 1821.972596] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.983596] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b4499-b521-9853-ab36-449fc4deb803, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.067444] env[62405]: DEBUG nova.scheduler.client.report [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1822.272733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f943617-ed06-46c8-a02d-869700dc1f17 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.255s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.411211] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.411571] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Instance network_info: |[{"id": "ff6b512b-10d4-455e-85af-39beb8f916a2", "address": "fa:16:3e:a6:89:ba", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6b512b-10", "ovs_interfaceid": "ff6b512b-10d4-455e-85af-39beb8f916a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1822.411866] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Acquired lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.412159] env[62405]: DEBUG nova.network.neutron [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Refreshing network info cache for port ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1822.413381] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:89:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff6b512b-10d4-455e-85af-39beb8f916a2', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1822.421717] env[62405]: DEBUG oslo.service.loopingcall [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.421832] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1822.422608] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2552a7e-c22d-4b75-b4c0-58cd941f4833 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.459760] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947598, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.460399] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947597, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.461700] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.461700] env[62405]: value = "task-1947599" [ 1822.461700] env[62405]: _type = "Task" [ 1822.461700] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.470714] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947599, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.481673] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520b4499-b521-9853-ab36-449fc4deb803, 'name': SearchDatastore_Task, 'duration_secs': 0.013266} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.481940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.482292] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 00158b10-4292-48f3-85a0-991af1dbc5f1/00158b10-4292-48f3-85a0-991af1dbc5f1.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1822.482632] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2d28b4c-ce56-4e22-b6b2-b23bd0eba29f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.489964] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1822.489964] env[62405]: value = "task-1947600" [ 1822.489964] env[62405]: _type = "Task" [ 1822.489964] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.500115] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947600, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.574907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.464s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.575494] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1822.577920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.425s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.578163] env[62405]: DEBUG nova.objects.instance [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lazy-loading 'resources' on Instance uuid 153adb6e-5381-4e91-881e-8e566a16905a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.960451] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947597, 'name': ReconfigVM_Task, 'duration_secs': 0.656105} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.965528] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Reconfigured VM instance instance-0000004a to attach disk [datastore1] af174cbf-3555-42b0-bacd-033f9ff46f08/af174cbf-3555-42b0-bacd-033f9ff46f08.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1822.967122] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538384} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.967122] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-700c4fa3-4efa-4f4a-a079-f01d6d457e68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.973083] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d/d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1822.973083] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1822.973083] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e15156c-762b-40b2-8942-8999230fd0f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.982315] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947599, 'name': CreateVM_Task, 'duration_secs': 0.449325} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.985218] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1822.985696] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1822.985696] env[62405]: value = "task-1947602" [ 1822.985696] env[62405]: _type = "Task" [ 1822.985696] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.986149] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1822.986149] env[62405]: value = "task-1947601" [ 1822.986149] env[62405]: _type = "Task" [ 1822.986149] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.989353] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.989590] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.989972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.990531] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2589730-3b26-4b57-976b-9da26c3f797c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.007891] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.007891] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5210dc12-427d-c3cf-31f3-9aa46f0596c8" [ 1823.007891] env[62405]: _type = "Task" [ 1823.007891] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.018207] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.018606] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947600, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516059} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.018876] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947601, 'name': Rename_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.022289] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 00158b10-4292-48f3-85a0-991af1dbc5f1/00158b10-4292-48f3-85a0-991af1dbc5f1.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1823.022577] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1823.022890] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf3edc5c-e4bc-4a12-bbc9-0ba25f2157ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.031913] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5210dc12-427d-c3cf-31f3-9aa46f0596c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009534} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.033580] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.033962] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1823.034347] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.034589] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.034917] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.035376] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.035376] env[62405]: value = "task-1947603" [ 1823.035376] env[62405]: _type = "Task" [ 1823.035376] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.035737] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f2ebdf3-90db-423d-8a6a-e9421a9fa946 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.046508] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947603, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.047879] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.048088] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1823.048857] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ce516d-050a-4412-bebb-bf99c28172e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.055474] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.055474] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f14e-ee84-dfea-a956-7799b4f24e56" [ 1823.055474] env[62405]: _type = "Task" [ 1823.055474] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.059299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.059541] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.059755] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.059933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.060115] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.062788] env[62405]: INFO nova.compute.manager [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Terminating instance [ 1823.069919] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5212f14e-ee84-dfea-a956-7799b4f24e56, 'name': SearchDatastore_Task, 'duration_secs': 0.0088} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.071849] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c834dcd0-db08-4333-8375-603901083f88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.082425] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.082425] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526d5714-4b2f-70f0-d807-f612a9c46d9b" [ 1823.082425] env[62405]: _type = "Task" [ 1823.082425] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.083948] env[62405]: DEBUG nova.compute.utils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1823.085627] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1823.085794] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1823.100176] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526d5714-4b2f-70f0-d807-f612a9c46d9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010871} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.102883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.102883] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b4b89cf6-4159-40fa-8b67-4d8bbf16eb32/b4b89cf6-4159-40fa-8b67-4d8bbf16eb32.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1823.103380] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e96b83b-e94d-4728-8973-fc0dbfe815f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.111562] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.111562] env[62405]: value = "task-1947604" [ 1823.111562] env[62405]: _type = "Task" [ 1823.111562] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.121757] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.148381] env[62405]: DEBUG nova.policy [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5eb8bdf5d3f34ce4ad48aba0697cfd4f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a497089c23946fd97e9f5061ef34ff1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1823.322129] env[62405]: DEBUG nova.network.neutron [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Updated VIF entry in instance network info cache for port ff6b512b-10d4-455e-85af-39beb8f916a2. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.322236] env[62405]: DEBUG nova.network.neutron [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Updating instance_info_cache with network_info: [{"id": "ff6b512b-10d4-455e-85af-39beb8f916a2", "address": "fa:16:3e:a6:89:ba", "network": {"id": "954a06a6-91b0-4e43-a964-815d7cb120a7", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1314830830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "774aaaffb55b401eae1c919aa2f45675", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff6b512b-10", "ovs_interfaceid": "ff6b512b-10d4-455e-85af-39beb8f916a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.488626] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Successfully created port: 8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1823.506091] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947601, 'name': Rename_Task, 'duration_secs': 0.13804} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.509448] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1823.509792] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09727} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.510013] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-157b1930-625d-4dbf-84bc-77e2ce620667 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.511870] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1823.516045] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062861a6-508e-4e60-8f78-529ac844711e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.544720] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d/d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1823.552788] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8523f273-588e-4dbf-acd1-561685b23589 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.568832] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1823.568832] env[62405]: value = "task-1947605" [ 1823.568832] env[62405]: _type = "Task" [ 1823.568832] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.576968] env[62405]: DEBUG nova.compute.manager [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1823.577265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1823.579211] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d262d152-e423-4095-a011-5f9526bd71ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.582162] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1823.582162] env[62405]: value = "task-1947606" [ 1823.582162] env[62405]: _type = "Task" [ 1823.582162] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.588966] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947603, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064176} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.596022] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1823.596977] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1823.599174] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947605, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.603221] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703e10b0-24b9-4cfd-af91-dc219ae70b7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.605923] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1823.609212] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86bf3cc0-9aa8-4fbc-b0ea-d731f5270e08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.614752] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.636559] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 00158b10-4292-48f3-85a0-991af1dbc5f1/00158b10-4292-48f3-85a0-991af1dbc5f1.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1823.640239] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a26b2e5b-435f-46b1-aede-bb8d7c77d2f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.655972] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe23341-2571-4e76-819e-79eddf149ed5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.661203] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1823.661203] env[62405]: value = "task-1947607" [ 1823.661203] env[62405]: _type = "Task" [ 1823.661203] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.665040] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457611} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.668698] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b4b89cf6-4159-40fa-8b67-4d8bbf16eb32/b4b89cf6-4159-40fa-8b67-4d8bbf16eb32.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1823.668917] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1823.672071] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70bab81b-dae3-468d-abc3-fe94d8c802b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.676125] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.676125] env[62405]: value = "task-1947608" [ 1823.676125] env[62405]: _type = "Task" [ 1823.676125] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.676125] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c146d6fe-8c62-4949-a27c-4eee5efd4f4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.684862] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947607, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.686548] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1823.686548] env[62405]: value = "task-1947609" [ 1823.686548] env[62405]: _type = "Task" [ 1823.686548] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.718544] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947608, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.720653] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8fdf0f-f030-4865-9714-7fbb6028dac7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.726436] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947609, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.731592] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727968c7-430f-4715-a21e-a981b6148de6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.745993] env[62405]: DEBUG nova.compute.provider_tree [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.827785] env[62405]: DEBUG oslo_concurrency.lockutils [req-4a918b70-2432-4722-b156-758335537158 req-fee97684-d5d4-46d6-8a56-6f5221943a18 service nova] Releasing lock "refresh_cache-b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.083384] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947605, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.097936] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947606, 'name': ReconfigVM_Task, 'duration_secs': 0.391254} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.098280] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d/d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1824.098943] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fd76ea2-ee64-48ed-9b75-72aa95bd5959 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.109730] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1824.109730] env[62405]: value = "task-1947610" [ 1824.109730] env[62405]: _type = "Task" [ 1824.109730] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.131412] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947610, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.176361] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947607, 'name': PowerOffVM_Task, 'duration_secs': 0.235246} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.176771] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1824.176998] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1824.177347] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83e45f52-70ef-4e09-a58f-f649cd87b499 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.189799] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947608, 'name': ReconfigVM_Task, 'duration_secs': 0.376746} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.193429] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 00158b10-4292-48f3-85a0-991af1dbc5f1/00158b10-4292-48f3-85a0-991af1dbc5f1.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1824.194187] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c113d3ba-e7cb-4176-95d0-2878d97d4ffc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.201217] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.500426} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.202606] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1824.202962] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1824.202962] env[62405]: value = "task-1947612" [ 1824.202962] env[62405]: _type = "Task" [ 1824.202962] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.203657] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dc8676-977b-43c9-a725-ef66f0b775e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.230176] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] b4b89cf6-4159-40fa-8b67-4d8bbf16eb32/b4b89cf6-4159-40fa-8b67-4d8bbf16eb32.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.233775] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb2cf142-81b9-4314-8ecc-a8fbf23d6e49 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.247866] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947612, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.248797] env[62405]: DEBUG nova.scheduler.client.report [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1824.256364] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1824.256613] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1824.256784] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] 2ab5f28c-1f71-4bea-8733-523e5570f5c6 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1824.258080] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2844459a-0f0c-4e52-941e-20ea041b63a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.261242] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1824.261242] env[62405]: value = "task-1947613" [ 1824.261242] env[62405]: _type = "Task" [ 1824.261242] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.267870] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1824.267870] env[62405]: value = "task-1947614" [ 1824.267870] env[62405]: _type = "Task" [ 1824.267870] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.274650] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947613, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.280695] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.583351] env[62405]: DEBUG oslo_vmware.api [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947605, 'name': PowerOnVM_Task, 'duration_secs': 0.706002} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.583625] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1824.583834] env[62405]: INFO nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Took 12.30 seconds to spawn the instance on the hypervisor. [ 1824.584022] env[62405]: DEBUG nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1824.584815] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab287e62-0d6b-4f77-87ca-044be8fdadaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.612017] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1824.623211] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947610, 'name': Rename_Task, 'duration_secs': 0.152652} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.624388] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1824.624612] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d863b07-405c-4c7c-abb1-5a197488f208 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.631838] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1824.631838] env[62405]: value = "task-1947615" [ 1824.631838] env[62405]: _type = "Task" [ 1824.631838] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.641525] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947615, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.643546] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1824.643738] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.643896] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1824.644098] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Flavor pref 0:0:0 
{{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.644252] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1824.644404] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1824.644643] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1824.644811] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1824.644980] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1824.645159] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1824.645334] env[62405]: DEBUG nova.virt.hardware [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1824.646308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf187ba-4076-4f43-b6ef-5937b9945f35 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.654237] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38810f7-5fc1-4dd3-b562-4a9c5c6fb3c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.715193] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947612, 'name': Rename_Task, 'duration_secs': 0.160211} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.715483] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1824.715723] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3a2db78-2967-4772-9b21-902a238b9fb9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.722140] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1824.722140] env[62405]: value = "task-1947616" [ 1824.722140] env[62405]: _type = "Task" [ 1824.722140] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.729541] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.753714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.176s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.756272] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.834s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.772613] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947613, 'name': ReconfigVM_Task, 'duration_secs': 0.27163} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.775805] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Reconfigured VM instance instance-0000004d to attach disk [datastore1] b4b89cf6-4159-40fa-8b67-4d8bbf16eb32/b4b89cf6-4159-40fa-8b67-4d8bbf16eb32.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1824.777212] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91e3355e-22a0-443a-b9d5-fabbd94d5d01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.779042] env[62405]: INFO nova.scheduler.client.report [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleted allocations for instance 153adb6e-5381-4e91-881e-8e566a16905a [ 1824.786414] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.789855] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1824.789855] env[62405]: value = "task-1947617" [ 1824.789855] env[62405]: _type = "Task" [ 1824.789855] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.800399] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947617, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.103072] env[62405]: INFO nova.compute.manager [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Took 37.37 seconds to build instance. [ 1825.149043] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947615, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.232405] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947616, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.283523] env[62405]: DEBUG oslo_vmware.api [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.595307} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.283523] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1825.283523] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1825.283523] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1825.284241] env[62405]: INFO nova.compute.manager [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1825.284567] env[62405]: DEBUG oslo.service.loopingcall [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.284806] env[62405]: DEBUG nova.compute.manager [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1825.284896] env[62405]: DEBUG nova.network.neutron [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1825.298437] env[62405]: DEBUG nova.compute.manager [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Received event network-vif-plugged-8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1825.298759] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] Acquiring lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.299096] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.299173] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.299986] env[62405]: DEBUG nova.compute.manager [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] No waiting events found dispatching network-vif-plugged-8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1825.299986] env[62405]: WARNING nova.compute.manager [req-d9e5db3b-21e8-44c6-8960-2fcea34d8104 req-7a72f428-ac27-44ef-9dc5-32b070a621d3 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Received unexpected event network-vif-plugged-8ec05620-75a4-4851-8cba-d4fc068e33e8 for instance with vm_state building and task_state spawning. 
[ 1825.299986] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d467200b-cd1a-4978-aae0-70efe957fab3 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "153adb6e-5381-4e91-881e-8e566a16905a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.517s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.307520] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947617, 'name': Rename_Task, 'duration_secs': 0.156569} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.308562] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Successfully updated port: 8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1825.311770] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1825.312827] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e57b3e40-af29-4cdd-a2a3-3de68d4f0999 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.319852] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1825.319852] env[62405]: value = "task-1947618" [ 1825.319852] env[62405]: _type = "Task" [ 1825.319852] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.333567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "af174cbf-3555-42b0-bacd-033f9ff46f08" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.338150] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947618, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.605742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7801dd64-5289-440e-aa01-b733c98d03e3 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.841s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.606490] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.273s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.606490] env[62405]: DEBUG nova.compute.manager [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1825.607677] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27ace08-bc9a-46fc-bab5-aa0aeafe4893 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.617308] env[62405]: DEBUG nova.compute.manager [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1825.618127] env[62405]: DEBUG nova.objects.instance [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'flavor' on Instance uuid af174cbf-3555-42b0-bacd-033f9ff46f08 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1825.645366] env[62405]: DEBUG oslo_vmware.api [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947615, 'name': PowerOnVM_Task, 'duration_secs': 0.582311} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.648015] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1825.648359] env[62405]: INFO nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Took 10.79 seconds to spawn the instance on the hypervisor. [ 1825.648696] env[62405]: DEBUG nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1825.650198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc293596-ddff-468a-a0f7-d1685fa8e4d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.667849] env[62405]: DEBUG nova.compute.manager [req-bf83decb-13f3-4a78-991f-a8592d216c90 req-ca56b658-6e8c-40bc-9b22-992bbe314c49 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Received event network-vif-deleted-e4459292-2e66-49b3-bca3-94dc7cd7afbc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1825.668692] env[62405]: INFO nova.compute.manager [req-bf83decb-13f3-4a78-991f-a8592d216c90 req-ca56b658-6e8c-40bc-9b22-992bbe314c49 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Neutron deleted interface e4459292-2e66-49b3-bca3-94dc7cd7afbc; detaching it from the instance and deleting it from the info cache [ 1825.668692] env[62405]: DEBUG nova.network.neutron [req-bf83decb-13f3-4a78-991f-a8592d216c90 req-ca56b658-6e8c-40bc-9b22-992bbe314c49 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.715255] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df2c4c5-365c-4a1f-91a5-08f45d3564d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.727171] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39429f9a-8f87-4020-a2b7-4931bd05fcb6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.736274] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947616, 'name': PowerOnVM_Task, 'duration_secs': 0.872682} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.761019] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1825.761307] env[62405]: INFO nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Took 8.34 seconds to spawn the instance on the hypervisor. [ 1825.761499] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1825.763041] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441c0346-4c94-436d-b5ef-e2d171637d48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.765918] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a4b6d9-7e5e-4235-82d8-9601495b7a42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.775085] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c10a747-f9cc-4aec-b0b9-44512005ea9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.792581] env[62405]: DEBUG nova.compute.provider_tree [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.813241] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.813389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.813540] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1825.830592] env[62405]: DEBUG oslo_vmware.api [None 
req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947618, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.140955] env[62405]: DEBUG nova.network.neutron [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.177940] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bb9ffe8-bd0f-44d0-a0ae-f00f01766163 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.178418] env[62405]: INFO nova.compute.manager [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Took 38.34 seconds to build instance. [ 1826.186381] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc931618-68e5-46be-af40-89ee2ec63b2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.227804] env[62405]: DEBUG nova.compute.manager [req-bf83decb-13f3-4a78-991f-a8592d216c90 req-ca56b658-6e8c-40bc-9b22-992bbe314c49 service nova] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Detach interface failed, port_id=e4459292-2e66-49b3-bca3-94dc7cd7afbc, reason: Instance 2ab5f28c-1f71-4bea-8733-523e5570f5c6 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1826.289094] env[62405]: INFO nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Took 37.33 seconds to build instance. [ 1826.295393] env[62405]: DEBUG nova.scheduler.client.report [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1826.331446] env[62405]: DEBUG oslo_vmware.api [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947618, 'name': PowerOnVM_Task, 'duration_secs': 0.775709} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.331903] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1826.332212] env[62405]: INFO nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Took 6.31 seconds to spawn the instance on the hypervisor. [ 1826.332599] env[62405]: DEBUG nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1826.333805] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef10fc9-15e8-4867-bcc2-f7f0178807d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.389789] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1826.626419] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1826.626746] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37880071-38cb-4446-b65a-028a5fff092b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.635973] env[62405]: DEBUG oslo_vmware.api [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1826.635973] env[62405]: value = "task-1947619" [ 1826.635973] env[62405]: _type = "Task" [ 1826.635973] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.645761] env[62405]: INFO nova.compute.manager [-] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Took 1.36 seconds to deallocate network for instance. [ 1826.651207] env[62405]: DEBUG oslo_vmware.api [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947619, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.667572] env[62405]: DEBUG nova.network.neutron [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Updating instance_info_cache with network_info: [{"id": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "address": "fa:16:3e:0c:78:a8", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ec05620-75", "ovs_interfaceid": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.680630] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7a6ad462-8acc-4bcd-b085-9457175be0d3 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.314s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.791949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.635s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.851337] env[62405]: INFO nova.compute.manager [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Took 36.36 seconds to build instance. [ 1827.145992] env[62405]: DEBUG oslo_vmware.api [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947619, 'name': PowerOffVM_Task, 'duration_secs': 0.198483} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.146283] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1827.146483] env[62405]: DEBUG nova.compute.manager [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1827.147298] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d40c2c-d921-47e1-bbe2-14dc03d6b394 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.156127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.171328] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.171666] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Instance network_info: |[{"id": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "address": "fa:16:3e:0c:78:a8", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ec05620-75", "ovs_interfaceid": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1827.172095] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 
tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:78:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ec05620-75a4-4851-8cba-d4fc068e33e8', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1827.181104] env[62405]: DEBUG oslo.service.loopingcall [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.181350] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1827.181581] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-699478d2-f17b-4784-83ec-81334fff0145 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.205826] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1827.205826] env[62405]: value = "task-1947620" [ 1827.205826] env[62405]: _type = "Task" [ 1827.205826] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.215707] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947620, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.307124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.549s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.307124] env[62405]: DEBUG nova.compute.manager [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62405) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1827.310129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.240s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.310410] env[62405]: DEBUG nova.objects.instance [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'resources' on Instance uuid 48554024-9b6f-44be-b21e-615b25cd790c {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1827.354281] env[62405]: DEBUG oslo_concurrency.lockutils [None req-60b2946e-7dfe-4867-b2ba-13141ef1e255 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.134s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.514120] env[62405]: DEBUG nova.compute.manager [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Received event network-changed-8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1827.514120] env[62405]: DEBUG nova.compute.manager [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Refreshing instance network info cache due to event network-changed-8ec05620-75a4-4851-8cba-d4fc068e33e8. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1827.518212] env[62405]: DEBUG oslo_concurrency.lockutils [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] Acquiring lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.518212] env[62405]: DEBUG oslo_concurrency.lockutils [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] Acquired lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.518212] env[62405]: DEBUG nova.network.neutron [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Refreshing network info cache for port 8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1827.661449] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ad668ba8-85b7-4e91-af69-8aab8973e109 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.718471] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947620, 'name': CreateVM_Task, 'duration_secs': 0.408553} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.718652] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1827.719387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.719597] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.720022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1827.720288] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9d96fb7-6390-4ef3-bb7f-21cf5addf160 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1827.728022] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1827.728022] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526165fe-5170-fc93-45a4-372301f85831" [ 1827.728022] env[62405]: _type = "Task" [ 1827.728022] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.739786] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526165fe-5170-fc93-45a4-372301f85831, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.872062] env[62405]: INFO nova.scheduler.client.report [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted allocation for migration f4e1eabb-c8ee-4e3c-b80a-8f1b540ce872 [ 1828.020865] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "00158b10-4292-48f3-85a0-991af1dbc5f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.021204] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.021594] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.024255] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.024255] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.029086] env[62405]: INFO nova.compute.manager [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Terminating instance [ 1828.030962] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.031229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.120188] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "af174cbf-3555-42b0-bacd-033f9ff46f08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.120188] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.120466] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.120559] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.120726] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.122598] env[62405]: INFO nova.compute.manager [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Terminating instance [ 1828.186468] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.187295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.187295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.187295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.187295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.189438] env[62405]: INFO nova.compute.manager [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Terminating instance [ 1828.244754] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526165fe-5170-fc93-45a4-372301f85831, 'name': SearchDatastore_Task, 'duration_secs': 0.012509} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.245322] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.245554] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1828.245781] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.245930] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.246125] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1828.246377] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb5e3018-7ac3-44b2-aca1-f464452efe3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.258032] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1828.258032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1828.259100] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f8b1c0f-1c96-4c6c-b0c6-1530ec10e846 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.271070] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1828.271070] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522c615c-423a-4214-0852-291bec6839e7" [ 1828.271070] env[62405]: _type = "Task" [ 1828.271070] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.283059] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522c615c-423a-4214-0852-291bec6839e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010202} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.285168] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6652b8-88eb-45ec-b1aa-a37152a9fd66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.288184] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-765b75f1-918d-4850-956c-aa86b5ee9cc2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.297057] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1828.297057] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524419c4-aa1e-354a-dce7-7a889ba82a67" [ 1828.297057] env[62405]: _type = "Task" [ 1828.297057] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.298205] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2973b619-acc6-47da-96ec-290cb0bb8121 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.312499] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524419c4-aa1e-354a-dce7-7a889ba82a67, 'name': SearchDatastore_Task, 'duration_secs': 0.013162} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.337049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.337363] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 73c5b28f-d21d-4ffc-9e67-911e4fb4db66/73c5b28f-d21d-4ffc-9e67-911e4fb4db66.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1828.340602] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea410293-072f-4f0b-b31b-3831f6206f86 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.343033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0ec224-39f1-40f6-bc19-a4a79656633a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.353373] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9236ac5f-31ab-4a7e-8161-27e1452b31d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.358053] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1828.358053] env[62405]: value = "task-1947621" [ 1828.358053] env[62405]: _type = "Task" [ 1828.358053] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.370518] env[62405]: DEBUG nova.compute.provider_tree [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.376851] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947621, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.378860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae74378-1806-4a46-8b31-966b987947fc tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.186s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.491221] env[62405]: DEBUG nova.network.neutron [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Updated VIF entry in instance network info cache for port 8ec05620-75a4-4851-8cba-d4fc068e33e8. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.491840] env[62405]: DEBUG nova.network.neutron [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Updating instance_info_cache with network_info: [{"id": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "address": "fa:16:3e:0c:78:a8", "network": {"id": "9e3e6e3d-df77-428f-b577-e4a42e82ec43", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "69dc3a146cd14230b1180689e2fea090", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ec05620-75", "ovs_interfaceid": "8ec05620-75a4-4851-8cba-d4fc068e33e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.538033] env[62405]: DEBUG nova.compute.manager [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1828.538229] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1828.538662] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1828.549087] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5215d15-c749-41b7-9e4a-fb5d7f466781 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.557660] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1828.558035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-645a18ff-838f-4cbf-87b6-262da1e59a74 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.571956] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1828.571956] env[62405]: value = "task-1947622" [ 1828.571956] env[62405]: _type = "Task" [ 1828.571956] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.583932] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947622, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.626357] env[62405]: DEBUG nova.compute.manager [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1828.626589] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1828.627563] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7f685f-f857-44a6-89bf-b80f1fbcdf10 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.637203] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1828.637511] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d29c8b0-ef4d-47e7-81dd-87fbb8ddcd3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.693034] env[62405]: DEBUG nova.compute.manager [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1828.693304] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1828.694260] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd551e3-713f-4349-9597-11aae3bf1623 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.705990] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1828.706324] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90eef6d5-66a1-4894-bcc4-c32fb9ce89aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.715278] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1828.715278] env[62405]: value = "task-1947624" [ 1828.715278] env[62405]: _type = "Task" [ 1828.715278] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.726664] env[62405]: DEBUG nova.objects.instance [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1828.728258] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947624, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.868632] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947621, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.876316] env[62405]: DEBUG nova.scheduler.client.report [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1828.948729] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1828.948892] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1828.949504] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] af174cbf-3555-42b0-bacd-033f9ff46f08 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1828.949504] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-264d593e-a990-4271-9cc9-f881eb13c1d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.958920] env[62405]: DEBUG oslo_vmware.api [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: 
(returnval){ [ 1828.958920] env[62405]: value = "task-1947625" [ 1828.958920] env[62405]: _type = "Task" [ 1828.958920] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.969719] env[62405]: DEBUG oslo_vmware.api [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.994469] env[62405]: DEBUG oslo_concurrency.lockutils [req-d4130444-5b05-42ad-af64-d3776e02017c req-1e3b4f8f-f076-4da5-9389-3364ee262d1a service nova] Releasing lock "refresh_cache-73c5b28f-d21d-4ffc-9e67-911e4fb4db66" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.084163] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947622, 'name': PowerOffVM_Task, 'duration_secs': 0.483904} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.085284] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.085636] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1829.088540] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1829.088540] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3456231b-2209-46ec-a2c0-e914ff86c3e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.225752] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947624, 'name': PowerOffVM_Task, 'duration_secs': 0.344264} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.226198] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1829.226198] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1829.226446] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8670acca-e93a-4b9d-b40b-38d4d31fe0da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.231805] env[62405]: DEBUG oslo_concurrency.lockutils [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.232742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.232742] env[62405]: DEBUG nova.network.neutron [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.232742] env[62405]: DEBUG nova.objects.instance [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'info_cache' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.279142] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1829.279347] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1829.279544] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleting the 
datastore file [datastore1] 00158b10-4292-48f3-85a0-991af1dbc5f1 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1829.280280] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2deab118-6005-478e-87fc-9f28f95a2f66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.289189] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1829.289189] env[62405]: value = "task-1947628" [ 1829.289189] env[62405]: _type = "Task" [ 1829.289189] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.300127] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947628, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.368321] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947621, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594797} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.368576] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 73c5b28f-d21d-4ffc-9e67-911e4fb4db66/73c5b28f-d21d-4ffc-9e67-911e4fb4db66.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1829.368781] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1829.369035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bb30166-729d-4308-add8-92a8fe6edbbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.376083] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1829.376083] env[62405]: value = "task-1947629" [ 1829.376083] env[62405]: _type = "Task" [ 1829.376083] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.379947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.382458] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.357s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.383388] env[62405]: DEBUG nova.objects.instance [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid ff8731d6-3c55-4ddc-aeb1-308d72313881 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.389723] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.402161] env[62405]: INFO nova.scheduler.client.report [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted allocations for instance 48554024-9b6f-44be-b21e-615b25cd790c [ 1829.475549] env[62405]: DEBUG oslo_vmware.api [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184075} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.475937] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1829.476250] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1829.477077] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1829.477077] env[62405]: INFO nova.compute.manager [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Took 0.85 seconds to destroy the instance on the hypervisor. [ 1829.477077] env[62405]: DEBUG oslo.service.loopingcall [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1829.477284] env[62405]: DEBUG nova.compute.manager [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1829.477284] env[62405]: DEBUG nova.network.neutron [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1829.632949] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1829.633293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1829.633487] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleting the datastore file [datastore1] b4b89cf6-4159-40fa-8b67-4d8bbf16eb32 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1829.633764] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e5a0d6a-0147-4c33-bc9f-c0f22d6f45e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.643351] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for the task: (returnval){ [ 1829.643351] env[62405]: value = "task-1947630" [ 1829.643351] env[62405]: _type = "Task" [ 1829.643351] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.654374] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.735651] env[62405]: DEBUG nova.objects.base [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Object Instance<15218373-ffa5-49ce-b604-423b7fc5fb35> lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1829.804265] env[62405]: DEBUG oslo_vmware.api [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431549} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.804265] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1829.804265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1829.804265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1829.804265] env[62405]: INFO nova.compute.manager [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1829.804265] env[62405]: DEBUG oslo.service.loopingcall [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1829.804265] env[62405]: DEBUG nova.compute.manager [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1829.804265] env[62405]: DEBUG nova.network.neutron [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1829.878197] env[62405]: DEBUG nova.compute.manager [req-4baf9478-c82d-493e-8c74-7dccdff0e667 req-07abc67e-401f-46aa-995b-2169d7d829b7 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Received event network-vif-deleted-bfaa42fd-a41b-4fd9-a12f-6b8599602de7 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1829.878197] env[62405]: INFO nova.compute.manager [req-4baf9478-c82d-493e-8c74-7dccdff0e667 req-07abc67e-401f-46aa-995b-2169d7d829b7 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Neutron deleted interface bfaa42fd-a41b-4fd9-a12f-6b8599602de7; detaching it from the instance and deleting it from the info cache [ 1829.878197] env[62405]: DEBUG nova.network.neutron [req-4baf9478-c82d-493e-8c74-7dccdff0e667 req-07abc67e-401f-46aa-995b-2169d7d829b7 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.891313] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106113} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.892235] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.892949] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd8dc05-f246-456c-9139-140ece264a07 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.912704] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f341608f-48d1-4559-b851-632f47c23d4a tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "48554024-9b6f-44be-b21e-615b25cd790c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.659s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.923541] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 73c5b28f-d21d-4ffc-9e67-911e4fb4db66/73c5b28f-d21d-4ffc-9e67-911e4fb4db66.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.927858] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3bac63e-5636-4c71-b4b4-d6446b1e91ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.950899] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1829.950899] env[62405]: value = "task-1947631" [ 1829.950899] env[62405]: _type = "Task" [ 1829.950899] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.971593] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947631, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.085282] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.085913] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.085913] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.086186] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.086186] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.088611] env[62405]: INFO nova.compute.manager [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Terminating instance [ 1830.123814] env[62405]: DEBUG nova.compute.manager [req-b88e5a49-cb65-417e-8dce-a3b84de222e4 req-653b4856-5f8a-49f5-ad43-94a4d232cda7 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Received event network-vif-deleted-556c9e65-2036-4d78-9b15-857b8261e634 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1830.124053] env[62405]: INFO nova.compute.manager [req-b88e5a49-cb65-417e-8dce-a3b84de222e4 req-653b4856-5f8a-49f5-ad43-94a4d232cda7 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Neutron deleted interface 556c9e65-2036-4d78-9b15-857b8261e634; detaching it from the instance and deleting it from the info cache [ 1830.124232] env[62405]: DEBUG nova.network.neutron [req-b88e5a49-cb65-417e-8dce-a3b84de222e4 req-653b4856-5f8a-49f5-ad43-94a4d232cda7 service nova] 
[instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.153446] env[62405]: DEBUG oslo_vmware.api [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Task: {'id': task-1947630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202109} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.156068] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1830.156285] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1830.156482] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1830.156670] env[62405]: INFO nova.compute.manager [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Took 1.46 seconds to destroy the instance on the hypervisor. [ 1830.156956] env[62405]: DEBUG oslo.service.loopingcall [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1830.157568] env[62405]: DEBUG nova.compute.manager [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1830.157667] env[62405]: DEBUG nova.network.neutron [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1830.190354] env[62405]: DEBUG nova.compute.manager [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1830.195538] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5797925f-00d4-410d-bd58-69b88996e4a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.316642] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6194c366-1356-49b7-9fa4-46c1dcceca80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.325889] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95928b44-7625-4010-b5ac-f4c13a601ab1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.363151] env[62405]: DEBUG nova.network.neutron [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.367684] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542f9673-9ece-4e81-bff1-4434d0f9767c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.376987] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1632976-be93-4031-91eb-11c642da2ed2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.385024] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e05cf955-a024-40c7-ac58-8218e480a284 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.397742] env[62405]: DEBUG nova.compute.provider_tree [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.407164] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8ee645-a011-4068-a82a-08b5e1aff00f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.443863] env[62405]: DEBUG nova.compute.manager [req-4baf9478-c82d-493e-8c74-7dccdff0e667 
req-07abc67e-401f-46aa-995b-2169d7d829b7 service nova] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Detach interface failed, port_id=bfaa42fd-a41b-4fd9-a12f-6b8599602de7, reason: Instance af174cbf-3555-42b0-bacd-033f9ff46f08 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1830.460667] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947631, 'name': ReconfigVM_Task, 'duration_secs': 0.323432} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.461084] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 73c5b28f-d21d-4ffc-9e67-911e4fb4db66/73c5b28f-d21d-4ffc-9e67-911e4fb4db66.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.461768] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62c2bd99-9339-454d-a6a4-0836e5090d2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.468864] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1830.468864] env[62405]: value = "task-1947632" [ 1830.468864] env[62405]: _type = "Task" [ 1830.468864] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.476944] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947632, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.596599] env[62405]: DEBUG nova.compute.manager [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1830.596956] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1830.598247] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013daa4e-2c9d-4099-9bf6-e4468258617c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.602706] env[62405]: DEBUG nova.network.neutron [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.609827] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.610409] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-891d8211-8ad1-481e-95ba-6ea29907536a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.617708] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1830.617708] env[62405]: value = "task-1947633" [ 1830.617708] env[62405]: _type = "Task" [ 1830.617708] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.626834] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947633, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.631818] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b33344c1-6873-4e46-8570-0b7a30242223 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.641427] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4fa3e9-2603-4a61-aee5-dd2616ad2217 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.679083] env[62405]: DEBUG nova.compute.manager [req-b88e5a49-cb65-417e-8dce-a3b84de222e4 req-653b4856-5f8a-49f5-ad43-94a4d232cda7 service nova] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Detach interface failed, port_id=556c9e65-2036-4d78-9b15-857b8261e634, reason: Instance 00158b10-4292-48f3-85a0-991af1dbc5f1 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1830.707511] env[62405]: INFO nova.compute.manager [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] instance snapshotting [ 1830.710285] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b0f60d-eae8-437f-8df9-b5be7e4f624c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.733463] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca357f58-24e4-4fb4-9526-eb226acac50f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.867958] env[62405]: INFO nova.compute.manager [-] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Took 1.39 seconds to deallocate network for instance. [ 1830.884111] env[62405]: DEBUG nova.network.neutron [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.887573] env[62405]: DEBUG nova.network.neutron [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [{"id": "7e786917-4e46-4359-899e-afc1456451ae", "address": "fa:16:3e:75:14:e2", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e786917-4e", "ovs_interfaceid": "7e786917-4e46-4359-899e-afc1456451ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.901364] env[62405]: DEBUG nova.scheduler.client.report [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1830.980535] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947632, 'name': Rename_Task, 'duration_secs': 0.141364} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.980726] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1830.981335] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7292d785-354a-4ac2-977d-059e1abb7b55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.991164] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1830.991164] env[62405]: value = "task-1947634" [ 1830.991164] env[62405]: _type = "Task" [ 1830.991164] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.999204] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947634, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.106416] env[62405]: INFO nova.compute.manager [-] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Took 1.30 seconds to deallocate network for instance. [ 1831.128556] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947633, 'name': PowerOffVM_Task, 'duration_secs': 0.223436} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.128913] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.129062] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.129343] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdd13796-4ce9-4035-9983-d0679941beda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.245375] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.245605] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.245794] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleting the datastore file [datastore1] 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.246087] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9fe5471-7d85-445a-b6b8-fc3e7c63524a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.248826] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1831.249095] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a4132f6d-1ca5-46ed-9040-cf640b76d406 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.256278] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1831.256278] env[62405]: value = "task-1947637" [ 1831.256278] env[62405]: _type = "Task" [ 1831.256278] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.257672] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1831.257672] env[62405]: value = "task-1947636" [ 1831.257672] env[62405]: _type = "Task" [ 1831.257672] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.268900] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947637, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.271889] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947636, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.374825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.386527] env[62405]: INFO nova.compute.manager [-] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Took 1.23 seconds to deallocate network for instance. 
[ 1831.392256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.407320] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.410991] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.958s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.437181] env[62405]: INFO nova.scheduler.client.report [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance ff8731d6-3c55-4ddc-aeb1-308d72313881 [ 1831.502914] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947634, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.613421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.771325] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947637, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.774620] env[62405]: DEBUG oslo_vmware.api [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162429} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.775033] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1831.775488] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1831.775786] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1831.776112] env[62405]: INFO nova.compute.manager [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1831.777190] env[62405]: DEBUG oslo.service.loopingcall [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1831.777190] env[62405]: DEBUG nova.compute.manager [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1831.777190] env[62405]: DEBUG nova.network.neutron [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1831.893410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.947648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dff5bdb5-848f-4927-b3b4-7be71adbaf65 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "ff8731d6-3c55-4ddc-aeb1-308d72313881" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.825s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.003425] env[62405]: DEBUG oslo_vmware.api [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947634, 'name': PowerOnVM_Task, 'duration_secs': 0.583927} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.004035] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1832.004259] env[62405]: INFO nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Took 7.39 seconds to spawn the instance on the hypervisor. [ 1832.004440] env[62405]: DEBUG nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1832.005295] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3745304-700a-4392-8c8b-622e499fec65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.271862] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947637, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.298644] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac8cbf3-1d01-49e2-a521-774a679102f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.308333] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555a461c-e2de-4c49-8f61-22ca8e8060a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.345181] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbdec32-817a-4b36-ad9c-54ed0ce570e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.353727] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9307246c-feb9-4f80-9547-3c21158eaa94 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.359601] env[62405]: DEBUG nova.compute.manager [req-f3d522f9-a4b6-4423-bec4-56532f321c12 req-5d424b9f-c4c4-4576-82f9-f93318b6479c service nova] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Received event network-vif-deleted-ff6b512b-10d4-455e-85af-39beb8f916a2 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1832.370609] env[62405]: DEBUG nova.compute.provider_tree [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.398370] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1832.398777] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4f41623-bc88-472b-89ec-84af47a47d2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.410346] env[62405]: DEBUG nova.compute.manager [req-95a0cfd1-1d6a-4d93-913c-271e03a9c377 req-866232d1-590d-48c8-9265-09d2428ba85d service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Received event network-vif-deleted-dd55cf55-bb7f-4660-a37a-f2c0e4abc731 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1832.410643] env[62405]: INFO nova.compute.manager [req-95a0cfd1-1d6a-4d93-913c-271e03a9c377 req-866232d1-590d-48c8-9265-09d2428ba85d service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Neutron deleted interface dd55cf55-bb7f-4660-a37a-f2c0e4abc731; detaching it from the instance and deleting it from the info cache [ 1832.410902] env[62405]: DEBUG nova.network.neutron [req-95a0cfd1-1d6a-4d93-913c-271e03a9c377 req-866232d1-590d-48c8-9265-09d2428ba85d service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.414920] env[62405]: DEBUG oslo_vmware.api [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1832.414920] env[62405]: value = "task-1947638" [ 1832.414920] env[62405]: _type = "Task" [ 1832.414920] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.428167] env[62405]: DEBUG oslo_vmware.api [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947638, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.530807] env[62405]: INFO nova.compute.manager [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Took 29.07 seconds to build instance. [ 1832.751277] env[62405]: DEBUG nova.network.neutron [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.771817] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947637, 'name': CreateSnapshot_Task, 'duration_secs': 1.202547} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.772597] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1832.773130] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82571701-cc11-40bb-b9bf-d542037cf29a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.873309] env[62405]: DEBUG nova.scheduler.client.report [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1832.916335] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be5399b3-ba6e-4cce-8f76-da68aac0f034 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.927740] env[62405]: DEBUG oslo_vmware.api [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947638, 'name': PowerOnVM_Task, 'duration_secs': 0.446213} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.928812] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1832.929037] env[62405]: DEBUG nova.compute.manager [None req-16eb3707-6323-4ba5-841d-44e785ce893d tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1832.929812] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac6ce88-8ffb-46b2-b9bd-aa899272227c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.934703] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f55e6e-4804-42d8-9014-bce91fccd0b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.970775] env[62405]: DEBUG nova.compute.manager [req-95a0cfd1-1d6a-4d93-913c-271e03a9c377 req-866232d1-590d-48c8-9265-09d2428ba85d service nova] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Detach interface failed, port_id=dd55cf55-bb7f-4660-a37a-f2c0e4abc731, reason: Instance 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1833.032999] env[62405]: DEBUG oslo_concurrency.lockutils [None req-298f3e3e-50d5-4c87-be64-626a9ecfe54d tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.578s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.255518] env[62405]: INFO nova.compute.manager [-] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Took 1.48 seconds to deallocate network for instance. [ 1833.291099] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1833.292110] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-56366b60-6b11-4c56-9383-d7625250f285 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.303871] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1833.303871] env[62405]: value = "task-1947639" [ 1833.303871] env[62405]: _type = "Task" [ 1833.303871] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.316518] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947639, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.434437] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "86378df0-a658-427d-aca5-de25f84eb28b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.434757] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.761965] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.814256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.814514] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.814726] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.814940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.815133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.816871] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947639, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.817345] env[62405]: INFO nova.compute.manager [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Terminating instance [ 1833.883814] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.474s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.886737] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.172s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.887058] env[62405]: DEBUG nova.objects.instance [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lazy-loading 'resources' on Instance uuid 058682a1-5240-4414-9203-c612ecd12999 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1833.937483] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1834.103250] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.103594] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.103863] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.104123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.104357] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.106675] env[62405]: INFO nova.compute.manager [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Terminating instance [ 1834.317187] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947639, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.320965] env[62405]: DEBUG nova.compute.manager [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1834.321193] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.321985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af1c8d4-7327-4708-91e6-aa8874864d0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.330584] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1834.330851] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9861e09-b1ac-4cfc-a585-bc086d66df0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.340220] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1834.340220] env[62405]: value = "task-1947640" [ 1834.340220] env[62405]: _type = "Task" [ 1834.340220] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.351258] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.454564] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.542896] env[62405]: INFO nova.scheduler.client.report [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocation for migration b4c9b590-842e-4bbd-bf8c-7c1854c857a2 [ 1834.614129] env[62405]: DEBUG nova.compute.manager [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1834.614129] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1834.614129] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e16a34-d5e3-4073-967d-602808ae3822 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.624267] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1834.624525] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5b04456-b9a8-4ac2-b4a6-7f9ae473b480 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.631194] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1834.631194] env[62405]: value = "task-1947641" [ 1834.631194] env[62405]: _type = "Task" [ 1834.631194] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.642928] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.789237] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eaa3f3-b76a-4624-b59d-e87e225f68ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.800126] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f21af7-9138-4ec0-b665-64b93daf1ef6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.834757] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c67bd6-d0de-45ad-bb42-9380122470a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.842033] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947639, 'name': CloneVM_Task, 'duration_secs': 1.514902} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.845521] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Created linked-clone VM from snapshot [ 1834.848304] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceb1ab7-d7d5-46f2-8b3c-b202d2f72303 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.852127] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f919d0-046a-4bac-b716-e856c4341e48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.863288] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947640, 'name': PowerOffVM_Task, 'duration_secs': 0.2212} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.863932] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1834.864122] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1834.864360] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cec6ca6a-5251-4a7c-be76-fe18788cab59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.868682] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Uploading image b937570e-c655-42e6-a249-915f61aec899 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1834.879279] env[62405]: DEBUG nova.compute.provider_tree [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.883265] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1834.883524] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-84b2aeac-da83-4533-8d25-bb811ac30eb1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.890125] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1834.890125] env[62405]: value = "task-1947643" [ 1834.890125] env[62405]: _type = "Task" [ 1834.890125] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.899824] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947643, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.048705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f2d050b-8bbd-4aa5-9294-b7a5804eb0e5 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 25.601s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.131897] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.132175] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.132491] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Deleting the datastore file [datastore1] 73c5b28f-d21d-4ffc-9e67-911e4fb4db66 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.135853] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c59f2ab3-102d-4056-a0db-8aaa9729a687 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.142848] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947641, 'name': PowerOffVM_Task, 'duration_secs': 0.184292} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.144063] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.144247] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.144542] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for the task: (returnval){ [ 1835.144542] env[62405]: value = "task-1947644" [ 1835.144542] env[62405]: _type = "Task" [ 1835.144542] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.144723] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d71a0c3-92a8-4c38-9adc-308dd1bf80fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.154660] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947644, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.280507] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.280698] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.280820] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleting the datastore file [datastore1] 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.281115] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1686e2f8-5680-4240-8561-a6518d7ee0c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.287775] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1835.287775] env[62405]: value = "task-1947646" [ 1835.287775] env[62405]: _type = "Task" [ 1835.287775] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.295552] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.382786] env[62405]: DEBUG nova.scheduler.client.report [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1835.400157] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947643, 'name': Destroy_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.656985] env[62405]: DEBUG oslo_vmware.api [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Task: {'id': task-1947644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187758} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.657369] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1835.657462] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1835.657641] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.657818] env[62405]: INFO nova.compute.manager [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1835.658111] env[62405]: DEBUG oslo.service.loopingcall [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.658315] env[62405]: DEBUG nova.compute.manager [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1835.658408] env[62405]: DEBUG nova.network.neutron [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.798425] env[62405]: DEBUG oslo_vmware.api [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185085} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.798675] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1835.798860] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1835.799057] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1835.799271] env[62405]: INFO nova.compute.manager [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1835.799510] env[62405]: DEBUG oslo.service.loopingcall [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.799699] env[62405]: DEBUG nova.compute.manager [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1835.799845] env[62405]: DEBUG nova.network.neutron [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1835.887464] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.890845] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.785s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.895588] env[62405]: INFO nova.compute.claims [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.909033] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947643, 'name': Destroy_Task, 'duration_secs': 0.658529} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.909033] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Destroyed the VM [ 1835.909538] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1835.910457] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0bc70709-f219-43f9-b8ec-9adf4b9e34d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.920412] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1835.920412] env[62405]: value = "task-1947647" [ 1835.920412] env[62405]: _type = "Task" [ 1835.920412] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.925552] env[62405]: INFO nova.scheduler.client.report [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Deleted allocations for instance 058682a1-5240-4414-9203-c612ecd12999 [ 1835.931247] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947647, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.122034] env[62405]: DEBUG nova.compute.manager [req-c63f538f-ba42-4a65-9905-dfdc8ffcc46f req-52c8bf40-6b85-49d0-a357-21c0b00256d6 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Received event network-vif-deleted-8ec05620-75a4-4851-8cba-d4fc068e33e8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1836.122317] env[62405]: INFO nova.compute.manager [req-c63f538f-ba42-4a65-9905-dfdc8ffcc46f req-52c8bf40-6b85-49d0-a357-21c0b00256d6 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Neutron deleted interface 8ec05620-75a4-4851-8cba-d4fc068e33e8; detaching it from the instance and deleting it from the info cache [ 1836.122835] env[62405]: DEBUG nova.network.neutron [req-c63f538f-ba42-4a65-9905-dfdc8ffcc46f req-52c8bf40-6b85-49d0-a357-21c0b00256d6 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.308220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.308548] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.308777] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.308964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.309160] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.315995] env[62405]: INFO nova.compute.manager [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Terminating instance [ 1836.372310] env[62405]: DEBUG nova.compute.manager [req-9105811a-8325-44e6-81f1-6d335da3fedf req-befe8fc5-0957-41a5-921e-fd5726808d31 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Received event network-vif-deleted-7e786917-4e46-4359-899e-afc1456451ae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1836.372516] env[62405]: INFO nova.compute.manager [req-9105811a-8325-44e6-81f1-6d335da3fedf req-befe8fc5-0957-41a5-921e-fd5726808d31 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Neutron deleted interface 7e786917-4e46-4359-899e-afc1456451ae; detaching it from the instance and deleting it from the info cache [ 1836.372810] env[62405]: DEBUG nova.network.neutron [req-9105811a-8325-44e6-81f1-6d335da3fedf req-befe8fc5-0957-41a5-921e-fd5726808d31 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.430895] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947647, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.437690] env[62405]: DEBUG oslo_concurrency.lockutils [None req-00cb0911-d74c-4501-b9bb-7605299ed7c9 tempest-MigrationsAdminTest-494957219 tempest-MigrationsAdminTest-494957219-project-member] Lock "058682a1-5240-4414-9203-c612ecd12999" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.451s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.498965] env[62405]: DEBUG nova.network.neutron [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.625156] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd8b3bb8-cdb2-43e9-be3d-f87b5dfeb967 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.635867] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eb77ab-1d5a-41f6-9909-bbd506445d13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.672209] env[62405]: DEBUG nova.compute.manager [req-c63f538f-ba42-4a65-9905-dfdc8ffcc46f req-52c8bf40-6b85-49d0-a357-21c0b00256d6 service nova] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Detach interface failed, port_id=8ec05620-75a4-4851-8cba-d4fc068e33e8, reason: Instance 73c5b28f-d21d-4ffc-9e67-911e4fb4db66 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1836.824302] env[62405]: DEBUG nova.compute.manager [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1836.824568] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1836.825521] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f69d690-e310-4ecb-a940-13bbd7ea4ea2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.833707] env[62405]: DEBUG nova.network.neutron [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.835505] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1836.835711] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4725afdc-9e1d-4377-a82a-e439c2db6629 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.844423] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1836.844423] env[62405]: value = "task-1947648" [ 1836.844423] env[62405]: _type = "Task" [ 1836.844423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.855030] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947648, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.874781] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-642b6807-13a6-4b36-8110-e5fad2b70183 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.884478] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4605223-aa34-4dee-ad7b-f50740326f76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.922970] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.926439] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.926439] env[62405]: DEBUG nova.compute.manager [req-9105811a-8325-44e6-81f1-6d335da3fedf req-befe8fc5-0957-41a5-921e-fd5726808d31 service nova] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Detach interface failed, port_id=7e786917-4e46-4359-899e-afc1456451ae, reason: Instance 15218373-ffa5-49ce-b604-423b7fc5fb35 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1836.934052] env[62405]: DEBUG oslo_vmware.api [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947647, 'name': RemoveSnapshot_Task, 'duration_secs': 0.674669} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.934310] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1837.002357] env[62405]: INFO nova.compute.manager [-] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Took 1.34 seconds to deallocate network for instance. 
[ 1837.265280] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b7163a-64d8-40b5-9538-9035e55c75aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.274186] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beceb5cb-e403-44ab-a45f-a1d43a3d542f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.308601] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf42c17c-75d4-46e6-bde5-74fcb5c4da70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.317598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a354e8-b4d0-4f62-b307-7e0c0bd23de8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.332547] env[62405]: DEBUG nova.compute.provider_tree [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1837.336151] env[62405]: INFO nova.compute.manager [-] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Took 1.54 seconds to deallocate network for instance. [ 1837.355853] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947648, 'name': PowerOffVM_Task, 'duration_secs': 0.483414} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.356199] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1837.356391] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1837.356633] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5e1340a-81b0-4a6e-a4f4-e90292aa8c85 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.429143] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1837.439285] env[62405]: WARNING nova.compute.manager [None req-fb0ee8ac-ffc6-4459-9c34-a47cd9bfcb1f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Image not found during snapshot: nova.exception.ImageNotFound: Image b937570e-c655-42e6-a249-915f61aec899 could not be found. [ 1837.496302] env[62405]: DEBUG nova.objects.instance [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lazy-loading 'flavor' on Instance uuid 742c8d94-48d1-4408-91dc-98f25661aa8d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.508973] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.579739] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1837.579972] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1837.580170] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] a1a84837-deef-4ffc-8a47-4891bfc2c87a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1837.580432] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee501a18-6d5b-4b38-b7bd-cbcbf3f470e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.588727] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1837.588727] env[62405]: value = "task-1947650" [ 1837.588727] env[62405]: _type = "Task" [ 1837.588727] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.597382] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.839436] env[62405]: DEBUG nova.scheduler.client.report [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1837.845263] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.948076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.001865] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.001865] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.101134] env[62405]: DEBUG oslo_vmware.api [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13139} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.101211] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1838.101769] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1838.101921] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1838.102136] env[62405]: INFO nova.compute.manager [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1838.102577] env[62405]: DEBUG oslo.service.loopingcall [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1838.102577] env[62405]: DEBUG nova.compute.manager [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1838.102662] env[62405]: DEBUG nova.network.neutron [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1838.214367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.214367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.214367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.214634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.214634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.216748] env[62405]: INFO nova.compute.manager [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Terminating instance [ 1838.347133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.347671] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1838.351763] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.986s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.351981] env[62405]: INFO nova.compute.claims [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1838.428369] env[62405]: DEBUG nova.network.neutron [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.479263] env[62405]: DEBUG nova.compute.manager [req-0f6e3165-1d68-4f0a-8c55-17dfc438e842 req-9b8c4962-b9bd-4a71-943a-5ed692df4325 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Received event network-vif-deleted-64634a81-f1e1-4078-894a-2f4e8b56de13 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1838.479263] env[62405]: INFO nova.compute.manager [req-0f6e3165-1d68-4f0a-8c55-17dfc438e842 req-9b8c4962-b9bd-4a71-943a-5ed692df4325 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Neutron deleted interface 64634a81-f1e1-4078-894a-2f4e8b56de13; detaching it from the instance and deleting it from the info cache [ 1838.479263] env[62405]: DEBUG nova.network.neutron [req-0f6e3165-1d68-4f0a-8c55-17dfc438e842 req-9b8c4962-b9bd-4a71-943a-5ed692df4325 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.496358] env[62405]: DEBUG nova.compute.manager [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1838.496358] env[62405]: DEBUG nova.compute.manager [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing instance network info cache due to event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1838.496358] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.721407] env[62405]: DEBUG nova.compute.manager [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1838.721636] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1838.722597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68977217-019b-44c4-9ed4-17adeb6b7f50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.731438] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1838.731438] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55652053-12e9-40db-b979-6dd6c894580c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.739535] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1838.739535] env[62405]: value = "task-1947651" [ 1838.739535] env[62405]: _type = "Task" [ 1838.739535] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.748921] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947651, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.857305] env[62405]: DEBUG nova.compute.utils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.862116] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1838.862364] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1838.906238] env[62405]: DEBUG nova.policy [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5386353a4f6d41d0be6b056a129eb125', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '521150d8f23f4f76a0c785481c99e897', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1838.916254] env[62405]: DEBUG nova.network.neutron [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.982160] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bda4edbc-0082-4652-96e1-0b82bf8878b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.993104] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ba7b07-7910-4ad5-a9f1-6182a2f70acd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.031400] env[62405]: DEBUG nova.compute.manager [req-0f6e3165-1d68-4f0a-8c55-17dfc438e842 req-9b8c4962-b9bd-4a71-943a-5ed692df4325 service nova] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Detach interface failed, port_id=64634a81-f1e1-4078-894a-2f4e8b56de13, reason: Instance a1a84837-deef-4ffc-8a47-4891bfc2c87a could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1839.201732] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Successfully created port: 37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1839.251640] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947651, 'name': PowerOffVM_Task, 'duration_secs': 0.189465} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.251754] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1839.251906] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1839.252218] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10c4e11c-81d4-45b0-a14f-36c396d046f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.327207] env[62405]: DEBUG nova.network.neutron [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.362832] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1839.418671] env[62405]: INFO nova.compute.manager [-] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Took 1.32 seconds to deallocate network for instance. [ 1839.552975] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1839.553227] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1839.553409] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleting the datastore file [datastore1] d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.553959] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5deae6e2-00bd-46bc-a6d7-de55019bece2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.562154] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1839.562154] env[62405]: value = "task-1947653" [ 1839.562154] env[62405]: _type = "Task" [ 1839.562154] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.574321] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.717724] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534f23c-667f-4450-b660-5f284bf60c67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.726238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3342ed0-1591-4d64-9b5d-d7ee4ca9f4d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.762794] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f1d117-f031-493d-9282-d5fdfc31bb1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.770568] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853a2d96-375e-451c-b0ab-6c4961a48b19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.778598] env[62405]: DEBUG nova.objects.instance [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lazy-loading 'flavor' on Instance uuid 742c8d94-48d1-4408-91dc-98f25661aa8d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1839.789017] env[62405]: DEBUG nova.compute.provider_tree [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1839.830297] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.830576] env[62405]: DEBUG nova.compute.manager [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Inject network info {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1839.830838] env[62405]: DEBUG nova.compute.manager [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] network_info to inject: |[{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, 
"meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1839.836268] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfiguring VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1839.837409] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.837604] env[62405]: DEBUG nova.network.neutron [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1839.838855] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1cbb0f5-9cb6-4ba4-b35c-aeb8308245f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.861089] env[62405]: DEBUG oslo_vmware.api [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1839.861089] env[62405]: value = "task-1947654" [ 1839.861089] env[62405]: _type = "Task" [ 1839.861089] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.873380] env[62405]: DEBUG oslo_vmware.api [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947654, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.928198] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.072216] env[62405]: DEBUG oslo_vmware.api [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237399} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.072762] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.072762] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1840.072994] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.072994] env[62405]: INFO nova.compute.manager [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1840.073274] env[62405]: DEBUG oslo.service.loopingcall [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.073570] env[62405]: DEBUG nova.compute.manager [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1840.073570] env[62405]: DEBUG nova.network.neutron [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.161557] env[62405]: DEBUG nova.network.neutron [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updated VIF entry in instance network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1840.161959] env[62405]: DEBUG nova.network.neutron [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.286949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.291932] env[62405]: DEBUG nova.scheduler.client.report [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1840.371856] env[62405]: DEBUG oslo_vmware.api [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947654, 'name': ReconfigVM_Task, 'duration_secs': 0.232652} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.372105] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4fc17b-9c71-4ab1-83bf-fcf423667e32 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfigured VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1840.375065] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1840.402424] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1840.402709] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1840.402931] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1840.403112] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1840.403495] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1840.403495] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1840.403678] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1840.403841] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1840.404020] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1840.404188] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1840.404362] env[62405]: DEBUG nova.virt.hardware [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1840.405232] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43704391-2d71-4e9b-becb-666499586612 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.414881] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393d0d9e-cf9c-4220-b0af-3ac6e76f84b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.549134] env[62405]: DEBUG nova.compute.manager [req-039da564-4e97-4035-bf65-cd385912fe9f req-36de6414-e88b-4963-98a4-16bfbfe0f0bb service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Received event network-vif-deleted-a051e2aa-2501-4f7a-82b2-25f0988776c6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1840.549134] env[62405]: INFO nova.compute.manager [req-039da564-4e97-4035-bf65-cd385912fe9f req-36de6414-e88b-4963-98a4-16bfbfe0f0bb service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Neutron deleted interface a051e2aa-2501-4f7a-82b2-25f0988776c6; detaching it from the instance and deleting it from the info cache [ 1840.549134] env[62405]: DEBUG nova.network.neutron [req-039da564-4e97-4035-bf65-cd385912fe9f req-36de6414-e88b-4963-98a4-16bfbfe0f0bb service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.664384] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d59dbc2-8239-49d3-ace0-13a086e54813 req-b4fbd502-b573-426a-ac12-d02d11a8863b service 
nova] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.664872] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.771409] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Successfully updated port: 37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1840.800017] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.800017] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1840.800347] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.644s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.800671] env[62405]: DEBUG nova.objects.instance [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lazy-loading 'resources' on Instance uuid 2ab5f28c-1f71-4bea-8733-523e5570f5c6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1840.837773] env[62405]: DEBUG nova.network.neutron [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.011145] env[62405]: DEBUG nova.network.neutron [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1841.051314] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de6a2116-53c7-4ea8-952f-4f214dc7b006 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.062186] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49064c7f-29d0-43f6-9462-b9d55b85c4e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.094582] env[62405]: DEBUG nova.compute.manager [req-039da564-4e97-4035-bf65-cd385912fe9f req-36de6414-e88b-4963-98a4-16bfbfe0f0bb service nova] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Detach interface failed, port_id=a051e2aa-2501-4f7a-82b2-25f0988776c6, reason: Instance d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1841.274639] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.274639] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.274774] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1841.303602] env[62405]: DEBUG nova.compute.utils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.308319] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1841.308319] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1841.340198] env[62405]: INFO nova.compute.manager [-] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Took 1.27 seconds to deallocate network for instance. 
[ 1841.356236] env[62405]: DEBUG nova.policy [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab96d1c81515474e93d40a003ef6d995', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '100991f695df4d998ec39be716228e1d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1841.658029] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06909437-cb8d-4b78-a0ce-fbae14852327 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.669886] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ecf90-405b-407f-babe-380cf02a8588 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.704692] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a4890c-9c2d-4e30-bf10-7e43351cfa0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.710517] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Successfully created port: 02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1841.715658] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bcac2b-9924-4a2f-9e94-13059ca9c02f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.730950] env[62405]: DEBUG nova.compute.provider_tree [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.808711] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1841.846222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.912129] env[62405]: DEBUG nova.network.neutron [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.916707] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1842.142019] env[62405]: DEBUG nova.network.neutron [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Updating instance_info_cache with network_info: [{"id": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "address": "fa:16:3e:c9:7b:bc", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37b28c39-c3", "ovs_interfaceid": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.222907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "8f133517-cff2-40c7-8333-a9116163313a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.222907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.234878] env[62405]: DEBUG nova.scheduler.client.report [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1843.077741] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 
tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.078016] env[62405]: DEBUG nova.compute.manager [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Inject network info {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1843.078281] env[62405]: DEBUG nova.compute.manager [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] network_info to inject: |[{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1843.082860] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfiguring VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1843.083325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.083588] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Instance network_info: |[{"id": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "address": "fa:16:3e:c9:7b:bc", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37b28c39-c3", "ovs_interfaceid": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1843.083898] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "742c8d94-48d1-4408-91dc-98f25661aa8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.084130] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.084331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.084544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.084667] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.086244] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 
tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1843.092019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.092019] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1843.094913] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Received event network-vif-plugged-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1843.095118] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Acquiring lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.099230] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.099230] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.099230] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] No waiting events found dispatching network-vif-plugged-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1843.099230] env[62405]: WARNING nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Received unexpected event network-vif-plugged-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded for instance with vm_state building and task_state spawning. 
[ 1843.099230] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Received event network-changed-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1843.099230] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Refreshing instance network info cache due to event network-changed-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1843.099230] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Acquiring lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.099230] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Acquired lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.099230] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Refreshing network info cache for port 37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1843.099230] env[62405]: INFO nova.compute.manager [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Terminating instance [ 1843.099920] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14d17b6a-be69-4fa6-b101-a65464df18de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.110236] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:7b:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37b28c39-c3f7-4c42-b4a2-3b9836cf0ded', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1843.118524] env[62405]: DEBUG oslo.service.loopingcall [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.118830] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.034s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.120439] env[62405]: INFO nova.compute.claims [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1843.126546] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1843.128385] env[62405]: INFO nova.scheduler.client.report [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted allocations for instance 2ab5f28c-1f71-4bea-8733-523e5570f5c6 [ 1843.131162] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e06d841-9c60-457f-a72a-e16e9cc742e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.150818] env[62405]: DEBUG oslo_vmware.api [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1843.150818] env[62405]: value = "task-1947655" [ 1843.150818] env[62405]: _type = "Task" [ 1843.150818] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.156526] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1843.156792] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1843.156978] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1843.157233] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1843.157329] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1843.157480] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1843.157738] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1843.157929] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 
tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1843.158935] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1843.158935] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1843.158935] env[62405]: DEBUG nova.virt.hardware [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1843.159782] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed17111-f222-4c1b-875c-7a9a67b5507e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.164990] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1843.164990] env[62405]: value = "task-1947656" [ 1843.164990] env[62405]: _type = "Task" [ 1843.164990] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.172962] env[62405]: DEBUG oslo_vmware.api [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947655, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.174936] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4478f83a-a6e2-4085-9f31-e854734805b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.202023] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947656, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.403387] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Successfully updated port: 02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.430704] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Updated VIF entry in instance network info cache for port 37b28c39-c3f7-4c42-b4a2-3b9836cf0ded. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.431102] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Updating instance_info_cache with network_info: [{"id": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "address": "fa:16:3e:c9:7b:bc", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37b28c39-c3", "ovs_interfaceid": "37b28c39-c3f7-4c42-b4a2-3b9836cf0ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.613091] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.629033] env[62405]: DEBUG nova.compute.manager [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1843.629289] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1843.630216] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37a00b8-5e82-4f82-84bc-59d2aea85526 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.641293] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1843.642354] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99de910d-98e9-477a-aea1-015ba84cd7ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.654726] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1843.654726] env[62405]: value = "task-1947657" [ 1843.654726] env[62405]: _type = "Task" [ 1843.654726] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.659042] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a850d796-3c6b-462c-b92a-1d17ad0461e5 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "2ab5f28c-1f71-4bea-8733-523e5570f5c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.599s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.675680] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947657, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.675840] env[62405]: DEBUG oslo_vmware.api [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947655, 'name': ReconfigVM_Task, 'duration_secs': 0.187789} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.677290] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9d2454-f4bf-48a4-9d47-cd6399e30d0b tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Reconfigured VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1843.683037] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947656, 'name': CreateVM_Task, 'duration_secs': 0.409801} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.683363] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1843.684050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.684222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.685296] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1843.685296] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8abe86f1-83c6-4a7f-b4f9-1562079ad0eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.693009] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1843.693009] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c433d5-2061-a44b-a0dd-aad782541517" [ 1843.693009] env[62405]: _type = "Task" [ 1843.693009] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.703448] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c433d5-2061-a44b-a0dd-aad782541517, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.710520] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.711153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.711153] env[62405]: DEBUG nova.objects.instance [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'flavor' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.906055] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.906055] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquired lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.906055] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.934017] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Releasing lock "refresh_cache-271cec64-e7b4-4a1b-a7d6-f3fd60086209" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.934325] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1843.934492] env[62405]: DEBUG nova.compute.manager [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing instance network info cache due to 
event network-changed-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1843.934695] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Acquiring lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.934835] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Acquired lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.934992] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Refreshing network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1844.171360] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947657, 'name': PowerOffVM_Task, 'duration_secs': 0.172936} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.173888] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1844.174084] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1844.174902] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f537970b-9f2f-449c-bb24-0ac8c7b3c7bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.203857] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c433d5-2061-a44b-a0dd-aad782541517, 'name': SearchDatastore_Task, 'duration_secs': 0.014496} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.204299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.204558] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1844.204737] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.204887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.205083] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1844.205383] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f8bba25-2a6e-42c4-9d23-1e0b2e73d67e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.216138] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1844.216138] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1844.218317] env[62405]: DEBUG nova.objects.instance [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1844.219488] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f040165-9765-4946-93e6-89fdc2321ef6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.228905] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1844.228905] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523927f6-4c7e-a6f7-eb56-34ea3fea2109" [ 1844.228905] env[62405]: _type = "Task" [ 1844.228905] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.239036] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523927f6-4c7e-a6f7-eb56-34ea3fea2109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.274475] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1844.274697] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1844.274882] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Deleting the datastore file [datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1844.275198] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-192d5750-b345-4e83-9589-eba8ecf2aafd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.284709] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for the task: (returnval){ [ 1844.284709] env[62405]: value = "task-1947659" [ 1844.284709] env[62405]: _type = "Task" [ 1844.284709] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.295748] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.440678] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1844.480881] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3fbf91-df3d-4e28-9c12-183e7c5f7a02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.488712] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a048611-e451-4c66-b5be-de289deea810 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.527956] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b460b122-24e9-48ae-912b-0928863efe5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.536308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1c8b57-dfd3-4541-94c4-ab31aabad42b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.550030] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1844.612958] env[62405]: DEBUG nova.network.neutron [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [{"id": "02e1d24b-61e9-485a-8968-37f57cd76b08", "address": "fa:16:3e:d2:fb:59", "network": {"id": "9834dd6d-7842-48af-96fe-a573b230bfce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1868330878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100991f695df4d998ec39be716228e1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e1d24b-61", "ovs_interfaceid": "02e1d24b-61e9-485a-8968-37f57cd76b08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.686174] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updated VIF entry in instance network info cache for port 4cdbc50f-67a4-4007-ba8c-4b0690bb67c5. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1844.686560] env[62405]: DEBUG nova.network.neutron [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [{"id": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "address": "fa:16:3e:1b:ec:6c", "network": {"id": "d6172d52-fb9e-4751-8a35-39c4d4546683", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-778897133-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "650fd0d6b10b4b88aac64a5b51c10ee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4cdbc50f-67", "ovs_interfaceid": "4cdbc50f-67a4-4007-ba8c-4b0690bb67c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.724895] env[62405]: DEBUG nova.objects.base [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<9d97bf1d-6830-48b1-831b-bf2b52188f32> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1844.725135] env[62405]: DEBUG nova.network.neutron [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1844.740372] env[62405]: DEBUG oslo_vmware.api 
[None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523927f6-4c7e-a6f7-eb56-34ea3fea2109, 'name': SearchDatastore_Task, 'duration_secs': 0.009283} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.740953] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d60dee49-509e-4e1c-b586-0ceca0f7d2f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.747368] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1844.747368] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527420d8-7487-f2c4-8a06-7dea2ebebfca" [ 1844.747368] env[62405]: _type = "Task" [ 1844.747368] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.756166] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527420d8-7487-f2c4-8a06-7dea2ebebfca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.786463] env[62405]: DEBUG nova.compute.manager [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received event network-vif-plugged-02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1844.786686] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Acquiring lock "65cd4af4-30cf-4435-8f32-501db450905f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.786891] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Lock "65cd4af4-30cf-4435-8f32-501db450905f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.787128] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Lock "65cd4af4-30cf-4435-8f32-501db450905f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.787310] env[62405]: DEBUG nova.compute.manager [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] No waiting events found dispatching network-vif-plugged-02e1d24b-61e9-485a-8968-37f57cd76b08 
{{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1844.787478] env[62405]: WARNING nova.compute.manager [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received unexpected event network-vif-plugged-02e1d24b-61e9-485a-8968-37f57cd76b08 for instance with vm_state building and task_state spawning. [ 1844.787640] env[62405]: DEBUG nova.compute.manager [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1844.787801] env[62405]: DEBUG nova.compute.manager [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing instance network info cache due to event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1844.787973] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Acquiring lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.799284] env[62405]: DEBUG oslo_vmware.api [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Task: {'id': task-1947659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13507} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.799554] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1844.799736] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1844.799911] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1844.800136] env[62405]: INFO nova.compute.manager [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Took 1.17 seconds to destroy the instance on the hypervisor. 
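The surrounding records all follow the same oslo.vmware pattern: a vCenter "*_Task" method is started through the API session (the "Invoking FileManager.DeleteDatastoreFile_Task with opID=..." record), and wait_for_task then polls it (the "progress is 0%" records) until it logs "completed successfully". For orientation only, a minimal standalone sketch of that pattern follows; it is not Nova's own code, and the vCenter host, credentials, datastore path, and Datacenter reference are placeholders.

from oslo_vmware import api as vmware_api

# Placeholder connection details; a reachable vCenter endpoint is required.
session = vmware_api.VMwareAPISession(
    'vc.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # polling cadence behind the "progress is 0%" records

vim = session.vim
file_manager = vim.service_content.fileManager
datacenter_ref = None   # placeholder; a real Datacenter managed-object reference goes here

# Start the asynchronous vCenter task (logged as "Invoking
# FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-...").
task = session.invoke_api(
    vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] 742c8d94-48d1-4408-91dc-98f25661aa8d',
    datacenter=datacenter_ref)

# Poll until the task finishes; this produces the "Task: {'id': task-..., 'name':
# DeleteDatastoreFile_Task} progress is ..." and "completed successfully" records.
task_info = session.wait_for_task(task)
print(task_info.state)
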
[ 1844.800339] env[62405]: DEBUG oslo.service.loopingcall [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1844.800809] env[62405]: DEBUG nova.compute.manager [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1844.800904] env[62405]: DEBUG nova.network.neutron [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1844.810892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bd4a99f6-8b54-405c-a423-6153c013b8aa tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.100s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.071757] env[62405]: ERROR nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [req-873bc527-92fd-4511-b4ec-b6bc1d6c674f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-873bc527-92fd-4511-b4ec-b6bc1d6c674f"}]} [ 1845.087664] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1845.100787] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1845.101030] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1845.113064] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1845.115117] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Releasing lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.115421] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Instance network_info: |[{"id": "02e1d24b-61e9-485a-8968-37f57cd76b08", "address": "fa:16:3e:d2:fb:59", "network": {"id": "9834dd6d-7842-48af-96fe-a573b230bfce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1868330878-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100991f695df4d998ec39be716228e1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e1d24b-61", "ovs_interfaceid": "02e1d24b-61e9-485a-8968-37f57cd76b08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1845.115900] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Acquired lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.116092] env[62405]: DEBUG nova.network.neutron [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.117339] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:fb:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02e1d24b-61e9-485a-8968-37f57cd76b08', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1845.125034] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Creating folder: Project (100991f695df4d998ec39be716228e1d). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1845.125501] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e730ba07-f9f8-4ee9-9e1c-f4ec7aac9cd0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.132309] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1845.137090] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Created folder: Project (100991f695df4d998ec39be716228e1d) in parent group-v401284. [ 1845.137301] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Creating folder: Instances. Parent ref: group-v401510. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1845.137540] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-776abfba-e417-4c2e-8125-4d9aa647e343 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.149613] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Created folder: Instances in parent group-v401510. [ 1845.149859] env[62405]: DEBUG oslo.service.loopingcall [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.150071] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1845.150281] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-723410f1-c070-4c1f-ae48-1b9c578b58ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.171503] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "79548471-56f8-410c-a664-d2242541cd2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.171805] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.171933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "79548471-56f8-410c-a664-d2242541cd2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.172125] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.172295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.175485] env[62405]: INFO nova.compute.manager [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Terminating instance [ 1845.190579] env[62405]: DEBUG oslo_concurrency.lockutils [req-0e75d629-bce9-43f9-8f77-13340addc5e7 req-23f2e975-e62e-4297-aae9-f4690eff4a62 service nova] Releasing lock "refresh_cache-742c8d94-48d1-4408-91dc-98f25661aa8d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.192770] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1845.192770] env[62405]: value = "task-1947662" [ 1845.192770] 
env[62405]: _type = "Task" [ 1845.192770] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.202122] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947662, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.262476] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527420d8-7487-f2c4-8a06-7dea2ebebfca, 'name': SearchDatastore_Task, 'duration_secs': 0.010425} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.265463] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.265754] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 271cec64-e7b4-4a1b-a7d6-f3fd60086209/271cec64-e7b4-4a1b-a7d6-f3fd60086209.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1845.266293] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cd278c3-efd1-45c4-9947-d955d841b819 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.274242] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1845.274242] env[62405]: value = "task-1947663" [ 1845.274242] env[62405]: _type = "Task" [ 1845.274242] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.286385] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947663, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.574447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04026d6a-b073-4729-ae98-74535188f515 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.588030] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4a3e1d-0193-44db-922a-a06686be22da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.622974] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e9295b-546f-4418-8978-3ea1dfc46d52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.636707] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c13af60-ee0e-48de-ab24-aeee140ed7b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.653723] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1845.680205] env[62405]: DEBUG nova.compute.manager [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1845.680443] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.681361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a30c3c9-796f-4621-8340-b314968cc8d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.691552] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.691884] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a762caf1-55be-4953-be2e-048d32c7c0bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.705103] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947662, 'name': CreateVM_Task, 'duration_secs': 0.407537} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.706509] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1845.706913] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1845.706913] env[62405]: value = "task-1947664" [ 1845.706913] env[62405]: _type = "Task" [ 1845.706913] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.707675] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.707842] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.708264] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1845.708653] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3150401-1082-4ac7-a67b-37ed6ab5baef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.721894] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1845.721894] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215f53c-0d25-4f3c-0ba8-8e65457d78fe" [ 1845.721894] env[62405]: _type = "Task" [ 1845.721894] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.722157] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.733547] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215f53c-0d25-4f3c-0ba8-8e65457d78fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.785797] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947663, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487167} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.788453] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 271cec64-e7b4-4a1b-a7d6-f3fd60086209/271cec64-e7b4-4a1b-a7d6-f3fd60086209.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1845.788669] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1845.788934] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-041458e9-fd14-402d-aa55-e6a3590a3553 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.797389] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1845.797389] env[62405]: value = "task-1947665" [ 1845.797389] env[62405]: _type = "Task" [ 1845.797389] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.805593] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947665, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.918690] env[62405]: DEBUG nova.network.neutron [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updated VIF entry in instance network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1845.919056] env[62405]: DEBUG nova.network.neutron [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [{"id": "02e1d24b-61e9-485a-8968-37f57cd76b08", "address": "fa:16:3e:d2:fb:59", "network": {"id": "9834dd6d-7842-48af-96fe-a573b230bfce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1868330878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100991f695df4d998ec39be716228e1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e1d24b-61", "ovs_interfaceid": "02e1d24b-61e9-485a-8968-37f57cd76b08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.920304] env[62405]: DEBUG nova.network.neutron [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.010100] env[62405]: DEBUG nova.compute.manager [req-2d66bf91-87af-4aff-82cd-d93840cfd593 req-bf5ad41c-76ce-4659-946b-15e316d48eaf service nova] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Received event network-vif-deleted-4cdbc50f-67a4-4007-ba8c-4b0690bb67c5 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1846.029691] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.029931] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.177027] env[62405]: ERROR nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [req-cfef6204-9b2f-4921-a722-63e9e3a9755c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cfef6204-9b2f-4921-a722-63e9e3a9755c"}]} [ 1846.193761] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1846.210320] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1846.210555] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1846.224059] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947664, 'name': PowerOffVM_Task, 'duration_secs': 0.211764} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.225027] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1846.230160] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.230363] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.230817] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-881eabc1-a0cb-4279-a7d1-1e423a80aea5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.238596] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5215f53c-0d25-4f3c-0ba8-8e65457d78fe, 'name': SearchDatastore_Task, 'duration_secs': 0.028879} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.238890] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.239131] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1846.239383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.240030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.240030] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1846.240030] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cdf27ca-2741-4bd0-a10b-cacb655204c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.248921] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1846.252374] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1846.252565] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None 
req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1846.253331] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1c4f16f-3c77-43bb-8660-cd38f59613fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.259821] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1846.259821] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524f248f-e55d-548a-a0a7-b791389d3fc5" [ 1846.259821] env[62405]: _type = "Task" [ 1846.259821] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.269362] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524f248f-e55d-548a-a0a7-b791389d3fc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.310659] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077549} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.313417] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1846.314422] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad818d1-6301-4787-b5f5-f1e0e32c6b6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.323158] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.323493] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.323681] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] 79548471-56f8-410c-a664-d2242541cd2a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.324202] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d7a2842-a701-4226-8e17-06adbd047394 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.348163] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 271cec64-e7b4-4a1b-a7d6-f3fd60086209/271cec64-e7b4-4a1b-a7d6-f3fd60086209.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.349730] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70a7912a-0ddb-4be7-8d90-4215f1669824 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.366860] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1846.366860] env[62405]: value = "task-1947667" [ 1846.366860] env[62405]: _type = "Task" [ 1846.366860] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.375803] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1846.375803] env[62405]: value = "task-1947668" [ 1846.375803] env[62405]: _type = "Task" [ 1846.375803] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.379136] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947667, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.389831] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947668, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.424100] env[62405]: DEBUG oslo_concurrency.lockutils [req-292763d4-6cde-4348-80e5-8b53c69b1ab7 req-76542a3f-b3ea-4d80-9919-59ce465f292c service nova] Releasing lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.424809] env[62405]: INFO nova.compute.manager [-] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Took 1.62 seconds to deallocate network for instance. [ 1846.532183] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1846.626302] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca9101d-ddfd-4500-956a-5f9a1d3c3b84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.634719] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d302ab-382f-49f5-9771-b75e3c1094e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.668948] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34828d05-58cd-44e5-ba69-24f9a861f463 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.677506] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6190b36-4ead-4c63-8b05-cdf8a0d7905f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.691156] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1846.770814] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524f248f-e55d-548a-a0a7-b791389d3fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.014364} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.771631] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52bde57f-811d-47a6-818a-6375f3a86182 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.778288] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1846.778288] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b806f3-659b-a2b4-2155-0521634c7dce" [ 1846.778288] env[62405]: _type = "Task" [ 1846.778288] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.786549] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b806f3-659b-a2b4-2155-0521634c7dce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.822280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.822570] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.822901] env[62405]: DEBUG nova.objects.instance [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'flavor' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1846.880335] env[62405]: DEBUG oslo_vmware.api [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233739} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.883609] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.883810] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.884039] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.884185] env[62405]: INFO nova.compute.manager [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1846.884543] env[62405]: DEBUG oslo.service.loopingcall [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.885121] env[62405]: DEBUG nova.compute.manager [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1846.885121] env[62405]: DEBUG nova.network.neutron [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.893859] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947668, 'name': ReconfigVM_Task, 'duration_secs': 0.279924} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.893859] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 271cec64-e7b4-4a1b-a7d6-f3fd60086209/271cec64-e7b4-4a1b-a7d6-f3fd60086209.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1846.894294] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64e10911-5378-4789-aabf-f1989cbb00ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.902907] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1846.902907] env[62405]: value = "task-1947669" [ 1846.902907] env[62405]: _type = "Task" [ 1846.902907] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.912526] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947669, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.934373] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.053167] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.222563] env[62405]: DEBUG nova.scheduler.client.report [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1847.222880] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating 
resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 122 to 123 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1847.223047] env[62405]: DEBUG nova.compute.provider_tree [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1847.289917] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b806f3-659b-a2b4-2155-0521634c7dce, 'name': SearchDatastore_Task, 'duration_secs': 0.050086} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.290193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.290444] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 65cd4af4-30cf-4435-8f32-501db450905f/65cd4af4-30cf-4435-8f32-501db450905f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1847.290694] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d64ba570-5254-4119-8ca3-a0416ebcb8e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.299182] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1847.299182] env[62405]: value = "task-1947670" [ 1847.299182] env[62405]: _type = "Task" [ 1847.299182] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.307715] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947670, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.413848] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947669, 'name': Rename_Task, 'duration_secs': 0.143402} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.414266] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1847.414422] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed5ce6bc-faad-4253-be1d-368a251a6e0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.422495] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1847.422495] env[62405]: value = "task-1947671" [ 1847.422495] env[62405]: _type = "Task" [ 1847.422495] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.426281] env[62405]: DEBUG nova.objects.instance [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.433416] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.670368] env[62405]: DEBUG nova.network.neutron [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.729726] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.610s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.730248] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1847.734015] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.359s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.734279] env[62405]: DEBUG nova.objects.instance [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'resources' on Instance uuid af174cbf-3555-42b0-bacd-033f9ff46f08 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1847.811217] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947670, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.932275] env[62405]: DEBUG nova.objects.base [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<9d97bf1d-6830-48b1-831b-bf2b52188f32> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1847.933041] env[62405]: DEBUG nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1847.934363] env[62405]: DEBUG oslo_vmware.api [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947671, 'name': PowerOnVM_Task, 'duration_secs': 0.505638} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.934613] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1847.934812] env[62405]: INFO nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Took 7.56 seconds to spawn the instance on the hypervisor. 
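The vSphere task entries above (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same invoke-then-poll flow that produces the "Waiting for the task ... progress is N% ... completed successfully" lines: a task is started on vCenter, then polled until it reports success or failure. Below is a minimal sketch of that pattern using oslo.vmware's public session API; it is not Nova's own helper code, and the vCenter host, credentials and VM name are placeholders.

    # Illustrative sketch only (not Nova's internal vm_util helpers): start an
    # asynchronous vSphere task and poll it to completion with oslo.vmware.
    # Host, credentials and the VM name below are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org',   # host (placeholder)
        'user', 'secret',        # server_username / server_password (placeholders)
        api_retry_count=3,
        task_poll_interval=0.5)

    # Look up a VM reference by name (the name is a placeholder).
    retrieved = session.invoke_api(vim_util, 'get_objects', session.vim,
                                   'VirtualMachine', 100, ['name'])
    vm_ref = next(obj.obj for obj in retrieved.objects
                  if obj.propSet and obj.propSet[0].val == 'my-test-vm')

    # Kick off a vSphere task (here a power-on) and block until it finishes.
    # wait_for_task polls the task object, logging progress along the way,
    # and raises an oslo.vmware exception if the task ends in an error state.
    power_on_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_on_task)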
[ 1847.935014] env[62405]: DEBUG nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1847.936146] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c54fb7-fa2f-4d2f-959a-e30bb49a9a87 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.003211] env[62405]: DEBUG nova.policy [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1848.112648] env[62405]: DEBUG nova.compute.manager [req-efbc5fe8-51f4-451d-ac08-5aae5201c5f8 req-08eb9910-9691-43bf-8104-dfdde648657c service nova] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Received event network-vif-deleted-512621ba-6031-4414-bcd1-627311dbd9a0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1848.173457] env[62405]: INFO nova.compute.manager [-] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Took 1.29 seconds to deallocate network for instance. [ 1848.235233] env[62405]: DEBUG nova.compute.utils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1848.236615] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1848.236790] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1848.297548] env[62405]: DEBUG nova.policy [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d34125804204f3b92e06e7b8738d73a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0d6dfea772e432289163b14e9e341c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1848.313321] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58645} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.313520] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 65cd4af4-30cf-4435-8f32-501db450905f/65cd4af4-30cf-4435-8f32-501db450905f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1848.314425] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1848.314425] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2003ce0-d9b7-4728-bd55-70dc70c7f2e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.325299] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1848.325299] env[62405]: value = "task-1947672" [ 1848.325299] env[62405]: _type = "Task" [ 1848.325299] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.332779] env[62405]: DEBUG nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Successfully created port: a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1848.339371] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947672, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.461859] env[62405]: INFO nova.compute.manager [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Took 34.38 seconds to build instance. [ 1848.651573] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12c8551-a2c8-45a6-bbda-a26a575f9ccb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.660348] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46068611-4385-453f-93b2-742159356093 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.692448] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.693430] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b73b32d-2aee-45cb-810e-5d38d0cb13a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.701541] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870e0203-6901-448e-938b-01a83ab1147e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.715944] env[62405]: DEBUG nova.compute.provider_tree [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1848.729423] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d 
tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Successfully created port: 0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1848.742933] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1848.836122] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109853} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.836408] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1848.837220] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8d1d57-6716-476e-9cc3-b08ce0bdf623 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.862041] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 65cd4af4-30cf-4435-8f32-501db450905f/65cd4af4-30cf-4435-8f32-501db450905f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1848.862344] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75c08266-ea0e-43e7-a5ce-5e0db8f81802 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.883775] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1848.883775] env[62405]: value = "task-1947673" [ 1848.883775] env[62405]: _type = "Task" [ 1848.883775] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.898803] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947673, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.962314] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Successfully created port: 2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1848.965067] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3f4a7c9f-6417-4f6d-b761-d9764b134ea4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.890s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.250831] env[62405]: DEBUG nova.scheduler.client.report [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1849.251182] env[62405]: DEBUG nova.compute.provider_tree [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 123 to 124 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1849.251324] env[62405]: DEBUG nova.compute.provider_tree [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1849.394876] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947673, 'name': ReconfigVM_Task, 'duration_secs': 0.27792} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.395178] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 65cd4af4-30cf-4435-8f32-501db450905f/65cd4af4-30cf-4435-8f32-501db450905f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1849.395882] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c189f0e8-a8a0-420e-80a6-a34226935d67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.404028] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1849.404028] env[62405]: value = "task-1947674" [ 1849.404028] env[62405]: _type = "Task" [ 1849.404028] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.410807] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947674, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.563634] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.563832] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.756577] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1849.759287] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.761385] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.148s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.761727] env[62405]: DEBUG nova.objects.instance [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lazy-loading 'resources' on Instance uuid 00158b10-4292-48f3-85a0-991af1dbc5f1 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.771469] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.771753] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.772041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.772151] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.772334] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.774944] env[62405]: INFO nova.compute.manager [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Terminating instance [ 1849.783631] env[62405]: INFO nova.scheduler.client.report [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance af174cbf-3555-42b0-bacd-033f9ff46f08 [ 1849.794141] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1849.794400] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1849.794555] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1849.794772] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1849.794930] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1849.795260] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1849.795509] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1849.795674] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1849.795841] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1849.796010] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1849.796194] env[62405]: DEBUG nova.virt.hardware [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1849.797309] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289ee172-9136-4e30-863a-2e2a98d10dd4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.805648] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f035a3-1186-45e6-b531-30ee037aafbd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.916025] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947674, 'name': Rename_Task, 'duration_secs': 0.138724} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.916529] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1849.916722] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fafdb0a0-d78b-405e-bfbc-665ea354802e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.924200] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1849.924200] env[62405]: value = "task-1947675" [ 1849.924200] env[62405]: _type = "Task" [ 1849.924200] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.932340] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.072619] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1850.072763] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1850.072892] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 1850.079915] env[62405]: DEBUG nova.compute.manager [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-plugged-a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1850.079915] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.080085] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.080249] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.080378] env[62405]: DEBUG nova.compute.manager [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] No waiting events found dispatching network-vif-plugged-a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1850.080527] env[62405]: WARNING nova.compute.manager [req-a6ddd7b7-6ea1-4b90-862b-adeccd789b4a req-3ab79312-0b08-4f64-a2ce-2d3aa99a6696 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received unexpected event network-vif-plugged-a466989b-10e1-492c-a30a-33ba96b092ca for instance with vm_state active and task_state 
None. [ 1850.279105] env[62405]: DEBUG nova.compute.manager [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1850.279361] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1850.280240] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe66b08b-e7aa-486d-b7fc-fc87dc897641 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.293977] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1850.294563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b83bb652-4c7d-4264-9e9f-8656ba062a25 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "af174cbf-3555-42b0-bacd-033f9ff46f08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.174s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.295439] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c2c4f05-4b79-4693-9b5c-57f542ef2055 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.304781] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1850.304781] env[62405]: value = "task-1947676" [ 1850.304781] env[62405]: _type = "Task" [ 1850.304781] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.316238] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.437230] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947675, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.452151] env[62405]: DEBUG nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Successfully updated port: a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1850.582137] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Skipping network cache update for instance because it is being deleted. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10463}} [ 1850.582335] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1850.582511] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Skipping network cache update for instance because it is Building. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10459}} [ 1850.600056] env[62405]: DEBUG nova.compute.manager [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-changed-a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1850.600257] env[62405]: DEBUG nova.compute.manager [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing instance network info cache due to event network-changed-a466989b-10e1-492c-a30a-33ba96b092ca. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1850.600468] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.600613] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.600772] env[62405]: DEBUG nova.network.neutron [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing network info cache for port a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1850.602517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.602517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.602663] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1850.603138] env[62405]: DEBUG nova.objects.instance [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lazy-loading 'info_cache' on Instance uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1850.609038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8762d05d-6b1d-4fc8-b4c3-87d97df0dfa5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.617903] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c65fff-84fd-45ec-88a1-30a77d24e9c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.648495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0fdc96-6d3a-479d-a266-fce46bef5583 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.656787] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1157eb23-ed05-42be-88a6-4c7a7af9f7e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.672041] env[62405]: DEBUG nova.compute.provider_tree [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] 
Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.815688] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947676, 'name': PowerOffVM_Task, 'duration_secs': 0.233797} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.815948] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1850.816155] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1850.816418] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edf0636a-cc85-4a96-af6f-78e84f42d53e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.937447] env[62405]: DEBUG oslo_vmware.api [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947675, 'name': PowerOnVM_Task, 'duration_secs': 0.535595} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.937681] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1850.937915] env[62405]: INFO nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Took 7.85 seconds to spawn the instance on the hypervisor. 
[ 1850.938164] env[62405]: DEBUG nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1850.938965] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab359ee7-e62e-4406-a118-a613a235feb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.954925] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.016970] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.017260] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.174989] env[62405]: DEBUG nova.scheduler.client.report [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1851.300566] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.300929] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.301087] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.301280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.301450] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.303630] env[62405]: INFO nova.compute.manager [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Terminating instance [ 1851.391559] env[62405]: DEBUG nova.network.neutron [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Added VIF to instance network info cache for port a466989b-10e1-492c-a30a-33ba96b092ca. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1851.392067] env[62405]: DEBUG nova.network.neutron [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a466989b-10e1-492c-a30a-33ba96b092ca", "address": "fa:16:3e:f8:73:cc", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa466989b-10", "ovs_interfaceid": "a466989b-10e1-492c-a30a-33ba96b092ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.456143] env[62405]: INFO nova.compute.manager [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Took 31.12 seconds to build instance. [ 1851.519529] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1851.624053] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.680479] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.682753] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.789s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.682992] env[62405]: DEBUG nova.objects.instance [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lazy-loading 'resources' on Instance uuid b4b89cf6-4159-40fa-8b67-4d8bbf16eb32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.703197] env[62405]: INFO nova.scheduler.client.report [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted allocations for instance 00158b10-4292-48f3-85a0-991af1dbc5f1 [ 1851.807638] env[62405]: DEBUG nova.compute.manager [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1851.807859] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.809489] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96e55d6-b9fe-4194-9d9c-b3df465806b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.817560] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.817797] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89b4e8c6-8cc2-4279-b342-1e3dbaf4bb16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.824715] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1851.824715] env[62405]: value = "task-1947678" [ 1851.824715] env[62405]: _type = "Task" [ 1851.824715] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.832493] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.895153] env[62405]: DEBUG oslo_concurrency.lockutils [req-5d0a2fab-9c41-4193-8b9c-f0b2165f3c11 req-06cc8979-4f6c-47bc-a5ed-d172411422a9 service nova] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.895508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.895719] env[62405]: DEBUG nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1851.959334] env[62405]: DEBUG oslo_concurrency.lockutils [None req-092ff11e-38a9-44cd-8bc0-77580e11d388 tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.632s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.042677] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.213489] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a87eee90-5581-4771-990c-7a8a5cfca6b9 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "00158b10-4292-48f3-85a0-991af1dbc5f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.192s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.215870] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.336880] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947678, 'name': PowerOffVM_Task, 'duration_secs': 0.203487} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.337335] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.337541] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.337821] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9057f623-8167-450d-a809-aff8f3386ba4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.440547] env[62405]: WARNING nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. ignoring it [ 1852.440946] env[62405]: WARNING nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. ignoring it [ 1852.441158] env[62405]: WARNING nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] a466989b-10e1-492c-a30a-33ba96b092ca already exists in list: port_ids containing: ['a466989b-10e1-492c-a30a-33ba96b092ca']. 
ignoring it [ 1852.485789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcb7eff-3255-4f80-85a3-3b9fb7628b93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.496643] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6d678a-f50e-4490-b0a5-76d846a58b9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.530177] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fdf2c7-e3f7-4185-ab3e-d996cab2aa2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.538109] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca7a598-47aa-4946-a61f-54be8b9a8c0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.551471] env[62405]: DEBUG nova.compute.provider_tree [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.719503] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-15218373-ffa5-49ce-b604-423b7fc5fb35" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.719709] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 1852.719904] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.720086] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.720244] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.720395] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.720539] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1852.720681] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_power_states {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1853.054529] env[62405]: DEBUG nova.scheduler.client.report [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1853.136727] env[62405]: DEBUG nova.network.neutron [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a466989b-10e1-492c-a30a-33ba96b092ca", "address": "fa:16:3e:f8:73:cc", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": 
"nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa466989b-10", "ovs_interfaceid": "a466989b-10e1-492c-a30a-33ba96b092ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.227224] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Getting list of instances from cluster (obj){ [ 1853.227224] env[62405]: value = "domain-c8" [ 1853.227224] env[62405]: _type = "ClusterComputeResource" [ 1853.227224] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1853.228747] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b15044-3275-446d-a3ef-813b8f6a5342 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.250315] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Got total of 11 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1853.250465] env[62405]: WARNING nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] While synchronizing instance power states, found 20 instances in the database and 11 instances on the hypervisor. [ 1853.250566] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 15218373-ffa5-49ce-b604-423b7fc5fb35 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.250747] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.250901] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 4d59d9fd-23df-4933-97ed-32602e51e9aa {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251069] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251254] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 2c623c00-92f2-4cc4-8503-963c3308d708 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251424] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 4c8c0d2f-d8d3-4422-8a5c-8999636b22be {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251576] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 79548471-56f8-410c-a664-d2242541cd2a {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251723] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid a1a84837-deef-4ffc-8a47-4891bfc2c87a {{(pid=62405) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.251871] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252030] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 742c8d94-48d1-4408-91dc-98f25661aa8d {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252187] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252377] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252481] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252625] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252771] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.252916] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid b4b89cf6-4159-40fa-8b67-4d8bbf16eb32 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.253070] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 73c5b28f-d21d-4ffc-9e67-911e4fb4db66 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.253221] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 271cec64-e7b4-4a1b-a7d6-f3fd60086209 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.253365] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 65cd4af4-30cf-4435-8f32-501db450905f {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.253511] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Triggering sync for uuid 171910d2-02b8-4219-ae75-5cecccea1de3 {{(pid=62405) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10856}} [ 1853.253857] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "15218373-ffa5-49ce-b604-423b7fc5fb35" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.254111] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.254369] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.254661] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.254856] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.255119] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.255328] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "2c623c00-92f2-4cc4-8503-963c3308d708" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.255505] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.255733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.255906] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.256138] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock 
"79548471-56f8-410c-a664-d2242541cd2a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.256343] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.256538] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.256712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.256931] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "742c8d94-48d1-4408-91dc-98f25661aa8d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.257179] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.257384] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.257743] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.257825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.258053] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.258375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.258641] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.258802] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.259034] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.259222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.259422] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.259611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "65cd4af4-30cf-4435-8f32-501db450905f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.259785] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "65cd4af4-30cf-4435-8f32-501db450905f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.260016] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.260209] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1853.260365] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1853.261089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebc1d98-c023-4198-9a03-5f1b40ee2ba5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.264141] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c645b8-78d8-4622-89de-6a0980226b6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.266824] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a768831-7f38-4805-a939-2c28c68bd4bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.269742] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fdf1c0-156c-4e00-ba15-de4a47b81d56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.272786] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e675cf-fd4d-4fb1-ba4d-067f5cc04aa7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.275704] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2f4e0d-5392-4a5d-9f84-d33b3e9e486b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.278692] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560ecf03-63cc-41da-a3db-24cedfce4e30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.283083] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07722ef5-92e5-4e37-ba58-1183f031bd9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.285926] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75019ee-7983-43a6-8eab-075c64be87a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.288588] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1853.328518] env[62405]: WARNING 
oslo_messaging._drivers.amqpdriver [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1853.560234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.877s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.563376] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.801s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.563376] env[62405]: DEBUG nova.objects.instance [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'resources' on Instance uuid 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.586574] env[62405]: INFO nova.scheduler.client.report [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Deleted allocations for instance b4b89cf6-4159-40fa-8b67-4d8bbf16eb32 [ 1853.639850] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.640539] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.797653] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.817956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.562s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.818215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.564s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.827730] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.572s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.828017] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.573s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.828370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.570s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.828590] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.829468] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46596fc-a71a-4e71-b069-4a2007017a29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.832226] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.574s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.832529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "65cd4af4-30cf-4435-8f32-501db450905f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.573s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.848073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.591s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.848073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.590s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.848343] env[62405]: DEBUG nova.virt.hardware 
[None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1853.848578] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1853.848741] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1853.848925] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1853.849092] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1853.849249] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1853.849457] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1853.849627] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1853.849798] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1853.849988] env[62405]: DEBUG nova.virt.hardware [None 
req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1853.850849] env[62405]: DEBUG nova.virt.hardware [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1853.856457] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfiguring VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1853.856769] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-249a7b56-9e06-43e1-aa58-d3626cea26eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.876325] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1853.876325] env[62405]: value = "task-1947680" [ 1853.876325] env[62405]: _type = "Task" [ 1853.876325] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.884699] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.094273] env[62405]: DEBUG oslo_concurrency.lockutils [None req-99098b44-1282-474a-a097-79c63411ba82 tempest-MultipleCreateTestJSON-1119773673 tempest-MultipleCreateTestJSON-1119773673-project-member] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.907s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.097782] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.836s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.098012] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1ad34e6-0111-4877-be92-37437aa26ae9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.108626] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286c45c3-e02c-44ca-a141-485a4dc33bfb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.150875] env[62405]: DEBUG nova.compute.manager [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1854.151142] env[62405]: DEBUG nova.compute.manager [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing instance network info cache due to event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1854.151319] env[62405]: DEBUG oslo_concurrency.lockutils [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] Acquiring lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.151468] env[62405]: DEBUG oslo_concurrency.lockutils [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] Acquired lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.151634] env[62405]: DEBUG nova.network.neutron [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.372014] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b638706-733f-4b7c-a890-f26c32380100 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.385203] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8718d732-7e3f-4824-bcca-6d79087b31ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.393899] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.435594] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea816e2b-4c4b-4be3-96b3-d1b495b52746 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.446456] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208553e5-eb39-47e7-a7af-884c95d20248 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.459513] env[62405]: DEBUG nova.compute.provider_tree [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.649751] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "b4b89cf6-4159-40fa-8b67-4d8bbf16eb32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.554s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.888268] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.958964] env[62405]: DEBUG nova.network.neutron [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updated VIF entry in instance network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1854.959379] env[62405]: DEBUG nova.network.neutron [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [{"id": "02e1d24b-61e9-485a-8968-37f57cd76b08", "address": "fa:16:3e:d2:fb:59", "network": {"id": "9834dd6d-7842-48af-96fe-a573b230bfce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1868330878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100991f695df4d998ec39be716228e1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e1d24b-61", "ovs_interfaceid": "02e1d24b-61e9-485a-8968-37f57cd76b08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.962729] env[62405]: DEBUG nova.scheduler.client.report [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1855.388358] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.461942] env[62405]: DEBUG oslo_concurrency.lockutils [req-a9301c84-e0cc-483f-8ef4-c9d30d988e67 req-15fa7e19-862d-4577-9301-00352542ad91 service nova] Releasing lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.466992] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.904s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.469339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.015s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.471838] env[62405]: INFO nova.compute.claims [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.492704] env[62405]: INFO nova.scheduler.client.report [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted allocations for instance 9aa9e0de-7314-4d8b-8e9f-b6d330cae914 [ 1855.888252] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.001577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4317e200-d3a7-46d4-abcb-a7a0c2661d46 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.916s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.004578] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.745s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.004578] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f77c4312-88fd-4bdc-9701-7bc7f4a9916d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.015360] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8f54b4-2189-483a-aebd-6e6716f4c52c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.138211] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.138211] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.138211] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleting the datastore file [datastore1] 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.138211] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48ec67b3-b105-493f-bc83-c1fba862c52d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.145689] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for the task: (returnval){ [ 1856.145689] env[62405]: value = "task-1947681" [ 1856.145689] env[62405]: _type = "Task" [ 1856.145689] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.155564] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947681, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.251789] env[62405]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1856.251789] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.251789] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.251789] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleting the datastore file [datastore1] 271cec64-e7b4-4a1b-a7d6-f3fd60086209 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.251789] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee81b509-c9be-4c75-9643-17a7fa08b88e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.260011] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1856.260011] env[62405]: value = "task-1947682" [ 1856.260011] env[62405]: _type = "Task" [ 1856.260011] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.268536] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.390225] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.554657] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "9aa9e0de-7314-4d8b-8e9f-b6d330cae914" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.659044] env[62405]: DEBUG oslo_vmware.api [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Task: {'id': task-1947681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209247} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.659335] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1856.659542] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1856.659736] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1856.659870] env[62405]: INFO nova.compute.manager [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Took 4.85 seconds to destroy the instance on the hypervisor. [ 1856.660145] env[62405]: DEBUG oslo.service.loopingcall [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.660355] env[62405]: DEBUG nova.compute.manager [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1856.660452] env[62405]: DEBUG nova.network.neutron [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1856.769898] env[62405]: DEBUG oslo_vmware.api [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180046} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.772617] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1856.772996] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1856.773104] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1856.773268] env[62405]: INFO nova.compute.manager [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Took 6.49 seconds to destroy the instance on the hypervisor. [ 1856.773553] env[62405]: DEBUG oslo.service.loopingcall [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.773966] env[62405]: DEBUG nova.compute.manager [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1856.774111] env[62405]: DEBUG nova.network.neutron [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1856.840339] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3acbc1-11cf-4e70-b783-a4c348186b84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.849066] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43422264-2dc3-4942-b23d-30ed8bd14b09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.887897] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5fc18d-1a47-46ee-8050-01a2166f4e0b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.898075] env[62405]: DEBUG oslo_vmware.api [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947680, 'name': ReconfigVM_Task, 'duration_secs': 2.920991} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.900576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.901395] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfigured VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1856.906793] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5912e88b-4da5-4733-9b27-92e69046afb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.919705] env[62405]: DEBUG nova.compute.provider_tree [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1856.992873] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.993224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.993500] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.993783] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.994035] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.998627] env[62405]: INFO nova.compute.manager [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Terminating instance [ 1857.186401] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Successfully updated port: 0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1857.410217] env[62405]: DEBUG oslo_concurrency.lockutils [None req-440e9859-e2e8-4e08-a890-4f5c82c281cc tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 10.587s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.446584] env[62405]: ERROR nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e 
tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [req-0ef49ce8-9034-4628-82b5-0e6813bbbcf0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0ef49ce8-9034-4628-82b5-0e6813bbbcf0"}]} [ 1857.464590] env[62405]: DEBUG nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1857.481832] env[62405]: DEBUG nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1857.482072] env[62405]: DEBUG nova.compute.provider_tree [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1857.496643] env[62405]: DEBUG nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1857.502308] env[62405]: DEBUG nova.compute.manager [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1857.502503] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1857.503387] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbb38d0-73ca-459f-8152-5e6f1d2adf59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.510854] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1857.513204] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8e0ebcf-9603-455d-a61a-301159e38b6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.518110] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1857.518110] env[62405]: value = "task-1947683" [ 1857.518110] env[62405]: _type = "Task" [ 1857.518110] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.518932] env[62405]: DEBUG nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1857.529524] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.670427] env[62405]: DEBUG nova.network.neutron [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.866778] env[62405]: DEBUG nova.network.neutron [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.872690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26656bd8-70aa-4b3d-aa67-e8d7ae1f08b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.881407] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d097adc9-ca7d-42c0-8eec-ab16f5ecea84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.913951] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7471966e-d411-43d2-b0dd-54231c7bcdf3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.924447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ef6c73-4100-424a-8ad1-cefed66a1edc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.941069] env[62405]: DEBUG nova.compute.provider_tree [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1858.032477] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947683, 'name': PowerOffVM_Task, 'duration_secs': 0.173463} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.032727] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1858.032935] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1858.033254] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acbb5e70-3971-4a27-aa3b-4720a1cc6bfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.173484] env[62405]: INFO nova.compute.manager [-] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Took 1.40 seconds to deallocate network for instance. [ 1858.197951] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1858.198668] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1858.198668] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleting the datastore file [datastore1] 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1858.198943] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e271daf-4de7-43a7-bf22-c98bd2710f34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.205938] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for the task: (returnval){ [ 1858.205938] env[62405]: value = "task-1947685" [ 1858.205938] env[62405]: _type = "Task" [ 1858.205938] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.215477] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947685, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.350601] env[62405]: DEBUG nova.compute.manager [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-vif-plugged-0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1858.351109] env[62405]: DEBUG oslo_concurrency.lockutils [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.351397] env[62405]: DEBUG oslo_concurrency.lockutils [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.351684] env[62405]: DEBUG oslo_concurrency.lockutils [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.352064] env[62405]: DEBUG nova.compute.manager [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] No waiting events found dispatching network-vif-plugged-0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1858.352284] env[62405]: WARNING nova.compute.manager [req-05160e61-dd65-4069-acaf-0cba57ae182e req-92ad80c7-4f12-4871-8090-4de612754e74 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received unexpected event network-vif-plugged-0a8cd850-d9e3-4640-98b7-44b386609be6 for instance with vm_state building and task_state spawning. [ 1858.377011] env[62405]: INFO nova.compute.manager [-] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Took 1.72 seconds to deallocate network for instance. 
[ 1858.467903] env[62405]: DEBUG nova.compute.manager [req-038b0c48-b1e1-428b-b5be-b96eebcbaa22 req-46c871ac-8868-4fd7-a643-1ecd0a871179 service nova] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Received event network-vif-deleted-6afd5e2e-fe5f-4f25-a879-a25672a67740 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1858.475452] env[62405]: DEBUG nova.scheduler.client.report [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1858.475699] env[62405]: DEBUG nova.compute.provider_tree [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 125 to 126 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1858.475882] env[62405]: DEBUG nova.compute.provider_tree [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1858.680277] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.716415] env[62405]: DEBUG oslo_vmware.api [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Task: {'id': task-1947685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160084} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.716705] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1858.716923] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1858.717149] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1858.717330] env[62405]: INFO nova.compute.manager [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1858.717585] env[62405]: DEBUG oslo.service.loopingcall [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.717782] env[62405]: DEBUG nova.compute.manager [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1858.717877] env[62405]: DEBUG nova.network.neutron [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1858.884015] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.982452] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.513s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.983270] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1858.986614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.477s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.986614] env[62405]: DEBUG nova.objects.instance [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lazy-loading 'resources' on Instance uuid 73c5b28f-d21d-4ffc-9e67-911e4fb4db66 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1859.486029] env[62405]: DEBUG nova.network.neutron [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.488312] env[62405]: DEBUG nova.compute.utils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1859.500466] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1859.500466] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1859.511058] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Successfully updated port: 2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1859.543900] env[62405]: DEBUG nova.policy [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1859.800016] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Successfully created port: f58e9a5c-89b1-4aff-8825-ba44d10e8d46 
{{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1859.849789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739b4a20-908a-4b78-a176-25c99f91d607 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.857858] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26040604-e4f0-4605-92f9-54ac36370d1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.892749] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d5cfc1-0817-49cf-a94e-c6ff8fa67aa1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.900780] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3327354b-6f9d-4d3f-8850-f83588dd0963 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.915010] env[62405]: DEBUG nova.compute.provider_tree [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.949092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-bfef94b3-682e-48fb-8149-02040e229cfb" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.949377] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-bfef94b3-682e-48fb-8149-02040e229cfb" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.949784] env[62405]: DEBUG nova.objects.instance [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'flavor' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1859.997561] env[62405]: INFO nova.compute.manager [-] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Took 1.28 seconds to deallocate network for instance. [ 1860.003522] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1860.012866] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.012974] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.013203] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.422667] env[62405]: DEBUG nova.scheduler.client.report [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1860.507492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.549186] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1860.574147] env[62405]: DEBUG nova.objects.instance [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.588538] env[62405]: DEBUG nova.compute.manager [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Received event network-vif-deleted-37b28c39-c3f7-4c42-b4a2-3b9836cf0ded {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1860.588742] env[62405]: DEBUG nova.compute.manager [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-changed-0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1860.588905] env[62405]: DEBUG nova.compute.manager [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Refreshing instance network info cache due to event network-changed-0a8cd850-d9e3-4640-98b7-44b386609be6. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1860.589096] env[62405]: DEBUG oslo_concurrency.lockutils [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] Acquiring lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.607518] env[62405]: DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1860.607702] env[62405]: DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing instance network info cache due to event network-changed-02e1d24b-61e9-485a-8968-37f57cd76b08. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1860.607906] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Acquiring lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.608061] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Acquired lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.608284] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Refreshing network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1860.634484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "65cd4af4-30cf-4435-8f32-501db450905f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.634678] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.634882] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "65cd4af4-30cf-4435-8f32-501db450905f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.635098] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.635291] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.638544] env[62405]: INFO nova.compute.manager [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Terminating instance [ 1860.837796] env[62405]: DEBUG nova.network.neutron [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updating instance_info_cache with network_info: [{"id": "0a8cd850-d9e3-4640-98b7-44b386609be6", "address": "fa:16:3e:ac:d6:6d", "network": {"id": "d2fb9418-fb56-44ed-b77d-31d4e09569bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-509329953", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a8cd850-d9", "ovs_interfaceid": "0a8cd850-d9e3-4640-98b7-44b386609be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b494e96-08e7-4608-a930-5d9da520c342", "address": "fa:16:3e:45:c3:cf", "network": {"id": "daafb1e7-9436-4cb4-8856-6378822f38ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626101583", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b494e96-08", "ovs_interfaceid": "2b494e96-08e7-4608-a930-5d9da520c342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.928431] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1860.931032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.085s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.931032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.932502] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.985s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.933970] env[62405]: INFO nova.compute.claims [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1860.954662] env[62405]: INFO nova.scheduler.client.report [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Deleted allocations for instance 73c5b28f-d21d-4ffc-9e67-911e4fb4db66 [ 1860.959814] env[62405]: INFO nova.scheduler.client.report [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted allocations for instance 15218373-ffa5-49ce-b604-423b7fc5fb35 [ 1861.012444] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1861.038453] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1861.038690] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1861.038850] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1861.039044] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1861.039202] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1861.039358] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1861.039570] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1861.039735] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1861.039902] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1861.040086] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1861.040269] env[62405]: DEBUG nova.virt.hardware [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1861.041117] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c279ebf-5792-4237-81c7-f218db7ed42d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.049378] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be41087-2158-4d96-be5a-ba445e3026a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.076099] env[62405]: DEBUG nova.objects.base [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<9d97bf1d-6830-48b1-831b-bf2b52188f32> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1861.076297] env[62405]: DEBUG nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1861.141219] env[62405]: DEBUG nova.compute.manager [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1861.141541] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1861.142450] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d489c103-c2f6-425c-9711-baed59a0ce61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.151254] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1861.151438] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55fd6a12-340d-4bcb-8537-6980f490f74f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.158916] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1861.158916] env[62405]: value = "task-1947686" [ 1861.158916] env[62405]: _type = "Task" [ 1861.158916] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.168571] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.172901] env[62405]: DEBUG nova.policy [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1861.340630] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.341092] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Instance network_info: |[{"id": "0a8cd850-d9e3-4640-98b7-44b386609be6", "address": "fa:16:3e:ac:d6:6d", "network": {"id": "d2fb9418-fb56-44ed-b77d-31d4e09569bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-509329953", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a8cd850-d9", "ovs_interfaceid": "0a8cd850-d9e3-4640-98b7-44b386609be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b494e96-08e7-4608-a930-5d9da520c342", "address": "fa:16:3e:45:c3:cf", "network": {"id": "daafb1e7-9436-4cb4-8856-6378822f38ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626101583", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b494e96-08", "ovs_interfaceid": "2b494e96-08e7-4608-a930-5d9da520c342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1861.341502] env[62405]: DEBUG oslo_concurrency.lockutils [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] Acquired lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.341712] env[62405]: DEBUG nova.network.neutron [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Refreshing network info cache for port 0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.347028] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:d6:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a8cd850-d9e3-4640-98b7-44b386609be6', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:c3:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b494e96-08e7-4608-a930-5d9da520c342', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1861.354491] env[62405]: DEBUG oslo.service.loopingcall [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.358362] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1861.358907] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01459ab2-c306-4bf7-b84e-62cc9bd55b9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.380317] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updated VIF entry in instance network info cache for port 02e1d24b-61e9-485a-8968-37f57cd76b08. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1861.380647] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [{"id": "02e1d24b-61e9-485a-8968-37f57cd76b08", "address": "fa:16:3e:d2:fb:59", "network": {"id": "9834dd6d-7842-48af-96fe-a573b230bfce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1868330878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100991f695df4d998ec39be716228e1d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e1d24b-61", "ovs_interfaceid": "02e1d24b-61e9-485a-8968-37f57cd76b08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.389824] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1861.389824] env[62405]: value = "task-1947687" [ 1861.389824] env[62405]: _type = "Task" [ 1861.389824] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.400819] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947687, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.467418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2f61574f-5e5b-45da-9e95-74ee6dbbd524 tempest-DeleteServersAdminTestJSON-768354058 tempest-DeleteServersAdminTestJSON-768354058-project-member] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.653s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.471610] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.209s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.471610] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9a95d002-c96e-4973-8d29-a5abe39025ec tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.368s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.472602] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4897a164-2708-408c-978c-f88e8091461d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.475273] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.221s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.475273] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0876519a-e52d-456a-875e-c9bcec8d84ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.487053] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6709eae5-6ce8-4aa1-9ca8-ad21b7dbf0a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.503307] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7034c8b0-4a30-4369-ab6d-6cc88628547e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.546066] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "15218373-ffa5-49ce-b604-423b7fc5fb35" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.071s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.621522] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Successfully updated 
port: f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1861.671802] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947686, 'name': PowerOffVM_Task, 'duration_secs': 0.195213} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.672096] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1861.672276] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1861.672513] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66747745-1826-46e5-9adb-c3f14289b50a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.727393] env[62405]: DEBUG nova.network.neutron [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updated VIF entry in instance network info cache for port 0a8cd850-d9e3-4640-98b7-44b386609be6. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1861.727872] env[62405]: DEBUG nova.network.neutron [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updating instance_info_cache with network_info: [{"id": "0a8cd850-d9e3-4640-98b7-44b386609be6", "address": "fa:16:3e:ac:d6:6d", "network": {"id": "d2fb9418-fb56-44ed-b77d-31d4e09569bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-509329953", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a8cd850-d9", "ovs_interfaceid": "0a8cd850-d9e3-4640-98b7-44b386609be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b494e96-08e7-4608-a930-5d9da520c342", "address": "fa:16:3e:45:c3:cf", "network": {"id": "daafb1e7-9436-4cb4-8856-6378822f38ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626101583", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b494e96-08", "ovs_interfaceid": "2b494e96-08e7-4608-a930-5d9da520c342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.840417] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1861.840645] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1861.840829] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Deleting the datastore file [datastore1] 65cd4af4-30cf-4435-8f32-501db450905f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1861.841549] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15db4360-5cea-4340-86fd-8a80e98c5904 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.849065] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for the task: (returnval){ [ 1861.849065] env[62405]: value = "task-1947689" [ 1861.849065] env[62405]: _type = "Task" [ 1861.849065] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.857114] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947689, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.883463] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Releasing lock "refresh_cache-65cd4af4-30cf-4435-8f32-501db450905f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.883764] env[62405]: DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-vif-plugged-2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1861.883972] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.884201] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.884383] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.884554] env[62405]: 
DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] No waiting events found dispatching network-vif-plugged-2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1861.884723] env[62405]: WARNING nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received unexpected event network-vif-plugged-2b494e96-08e7-4608-a930-5d9da520c342 for instance with vm_state building and task_state spawning. [ 1861.884882] env[62405]: DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-changed-2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1861.885079] env[62405]: DEBUG nova.compute.manager [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Refreshing instance network info cache due to event network-changed-2b494e96-08e7-4608-a930-5d9da520c342. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1861.885270] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Acquiring lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.897560] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947687, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.049182] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "73c5b28f-d21d-4ffc-9e67-911e4fb4db66" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.580s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.124684] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.124833] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.124980] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1862.215302] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea305df-e574-4063-b4d6-31ab7d273048 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.222970] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c081517-6555-4bc3-840f-5300b37c8abf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.252335] env[62405]: DEBUG oslo_concurrency.lockutils [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] Releasing lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.252593] env[62405]: DEBUG nova.compute.manager [req-ed8dffdf-d933-4547-8ee0-eafb031e7db7 req-2911524f-b82d-4eac-a1d4-404306f7f65c service nova] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Received event network-vif-deleted-a6bb60c9-208a-4c73-96e1-13626d7d1dd8 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1862.253237] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Acquired lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.253423] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Refreshing network info cache for port 2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.254960] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fbf141-ea4c-4d8c-8bef-68f414955ca1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.263440] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ccd080-7f25-400a-ad36-db3dc20251f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.277696] env[62405]: DEBUG nova.compute.provider_tree [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.359703] env[62405]: DEBUG oslo_vmware.api [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Task: {'id': task-1947689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19456} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.359967] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1862.360175] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1862.360410] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1862.360594] env[62405]: INFO nova.compute.manager [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1862.360836] env[62405]: DEBUG oslo.service.loopingcall [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1862.361030] env[62405]: DEBUG nova.compute.manager [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1862.361126] env[62405]: DEBUG nova.network.neutron [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1862.400019] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947687, 'name': CreateVM_Task, 'duration_secs': 0.680641} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.400236] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1862.401042] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.401209] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.401561] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1862.401866] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-812a6d93-1247-4445-b11b-2a6bbf1e5d90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.408583] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1862.408583] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a7bcdd-6298-ce27-30b1-398bf72519f7" [ 1862.408583] env[62405]: _type = "Task" [ 1862.408583] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.416339] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a7bcdd-6298-ce27-30b1-398bf72519f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.690582] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1862.781388] env[62405]: DEBUG nova.scheduler.client.report [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1862.876388] env[62405]: DEBUG nova.compute.manager [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Received event network-vif-plugged-f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1862.876606] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Acquiring lock "86378df0-a658-427d-aca5-de25f84eb28b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.876939] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Lock "86378df0-a658-427d-aca5-de25f84eb28b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.877561] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Lock "86378df0-a658-427d-aca5-de25f84eb28b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.877667] env[62405]: DEBUG nova.compute.manager [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] No waiting events found dispatching network-vif-plugged-f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1862.877843] env[62405]: WARNING nova.compute.manager [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Received unexpected event network-vif-plugged-f58e9a5c-89b1-4aff-8825-ba44d10e8d46 for instance with vm_state building and task_state spawning. 
[ 1862.878017] env[62405]: DEBUG nova.compute.manager [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Received event network-changed-f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1862.878198] env[62405]: DEBUG nova.compute.manager [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Refreshing instance network info cache due to event network-changed-f58e9a5c-89b1-4aff-8825-ba44d10e8d46. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1862.878382] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Acquiring lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.919936] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a7bcdd-6298-ce27-30b1-398bf72519f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.920595] env[62405]: DEBUG nova.network.neutron [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Updating instance_info_cache with network_info: [{"id": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "address": "fa:16:3e:b2:01:84", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf58e9a5c-89", "ovs_interfaceid": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.921984] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.922263] env[62405]: DEBUG nova.virt.vmwareapi.vmops 
[None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.922896] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.922896] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.923058] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.931365] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c406a8e4-2aa3-47e6-bc42-f826a66dc5c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.934887] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.935105] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.935917] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fcc8810-8629-4924-9ef8-0b6e0a2c9db9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.941881] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1862.941881] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52585101-06bd-9a42-4e79-ab7e46917e14" [ 1862.941881] env[62405]: _type = "Task" [ 1862.941881] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.950275] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52585101-06bd-9a42-4e79-ab7e46917e14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.074099] env[62405]: DEBUG nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Successfully updated port: bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1863.085672] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updated VIF entry in instance network info cache for port 2b494e96-08e7-4608-a930-5d9da520c342. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1863.086131] env[62405]: DEBUG nova.network.neutron [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updating instance_info_cache with network_info: [{"id": "0a8cd850-d9e3-4640-98b7-44b386609be6", "address": "fa:16:3e:ac:d6:6d", "network": {"id": "d2fb9418-fb56-44ed-b77d-31d4e09569bd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-509329953", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b6942d7-d4ab-4b2a-8d0f-76bf2a2478ad", "external-id": "nsx-vlan-transportzone-871", "segmentation_id": 871, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a8cd850-d9", "ovs_interfaceid": "0a8cd850-d9e3-4640-98b7-44b386609be6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2b494e96-08e7-4608-a930-5d9da520c342", "address": "fa:16:3e:45:c3:cf", "network": {"id": "daafb1e7-9436-4cb4-8856-6378822f38ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626101583", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", 
"external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b494e96-08", "ovs_interfaceid": "2b494e96-08e7-4608-a930-5d9da520c342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.089587] env[62405]: DEBUG nova.compute.manager [req-90542558-116f-46eb-b755-b2f40c2036f7 req-6df9be70-1ba0-404b-a850-b75494684b5c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Received event network-vif-deleted-02e1d24b-61e9-485a-8968-37f57cd76b08 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1863.089786] env[62405]: INFO nova.compute.manager [req-90542558-116f-46eb-b755-b2f40c2036f7 req-6df9be70-1ba0-404b-a850-b75494684b5c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Neutron deleted interface 02e1d24b-61e9-485a-8968-37f57cd76b08; detaching it from the instance and deleting it from the info cache [ 1863.089961] env[62405]: DEBUG nova.network.neutron [req-90542558-116f-46eb-b755-b2f40c2036f7 req-6df9be70-1ba0-404b-a850-b75494684b5c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.136101] env[62405]: DEBUG nova.network.neutron [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.229736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.229994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.287057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.287707] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1863.292464] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.365s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.292676] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.296264] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.449s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.296264] env[62405]: DEBUG nova.objects.instance [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lazy-loading 'resources' on Instance uuid d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1863.322452] env[62405]: INFO nova.scheduler.client.report [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocations for instance a1a84837-deef-4ffc-8a47-4891bfc2c87a [ 1863.426830] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.427412] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Instance network_info: |[{"id": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "address": "fa:16:3e:b2:01:84", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", 
"segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf58e9a5c-89", "ovs_interfaceid": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1863.427736] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Acquired lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.427923] env[62405]: DEBUG nova.network.neutron [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Refreshing network info cache for port f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1863.429151] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:01:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f58e9a5c-89b1-4aff-8825-ba44d10e8d46', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1863.440189] env[62405]: DEBUG oslo.service.loopingcall [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1863.440189] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1863.440189] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7509748e-99da-49df-8ca1-fb3232797310 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.467022] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1863.467022] env[62405]: value = "task-1947690" [ 1863.467022] env[62405]: _type = "Task" [ 1863.467022] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.470537] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52585101-06bd-9a42-4e79-ab7e46917e14, 'name': SearchDatastore_Task, 'duration_secs': 0.008785} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.474232] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471fe55c-21b9-4208-af1c-1c03df12d549 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.483671] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947690, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.484017] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1863.484017] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52119cec-6e80-85f1-ade5-9fc45dfc8042" [ 1863.484017] env[62405]: _type = "Task" [ 1863.484017] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.493080] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52119cec-6e80-85f1-ade5-9fc45dfc8042, 'name': SearchDatastore_Task, 'duration_secs': 0.008949} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.493352] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.493587] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 171910d2-02b8-4219-ae75-5cecccea1de3/171910d2-02b8-4219-ae75-5cecccea1de3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.493837] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29247420-1727-4747-815e-5749ce89dbf2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.501957] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1863.501957] env[62405]: value = "task-1947691" [ 1863.501957] env[62405]: _type = "Task" [ 1863.501957] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.509199] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947691, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.578994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.579193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.579263] env[62405]: DEBUG nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1863.593895] env[62405]: DEBUG oslo_concurrency.lockutils [req-82933477-9a70-44a2-8810-32bbf913c5cf req-5e7548d0-f5b5-4b1a-8a2a-624dfacc3933 service nova] Releasing lock "refresh_cache-171910d2-02b8-4219-ae75-5cecccea1de3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.594806] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58f94059-f1b8-435a-a3ab-eebddb4e0035 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.603711] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed94c494-8858-4238-9866-50ee468925ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.634608] env[62405]: DEBUG nova.compute.manager [req-90542558-116f-46eb-b755-b2f40c2036f7 req-6df9be70-1ba0-404b-a850-b75494684b5c service nova] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Detach interface failed, port_id=02e1d24b-61e9-485a-8968-37f57cd76b08, reason: Instance 65cd4af4-30cf-4435-8f32-501db450905f could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1863.638361] env[62405]: INFO nova.compute.manager [-] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Took 1.28 seconds to deallocate network for instance. [ 1863.732750] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1863.798056] env[62405]: DEBUG nova.compute.utils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1863.802337] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1863.802529] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1863.833590] env[62405]: DEBUG oslo_concurrency.lockutils [None req-96b62204-f272-43ef-93d3-fc38c043a21e tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.525s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.835114] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.579s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.835867] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b6f0bff-ed3e-414b-bbfc-0fe8f31ae63c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.847364] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08079768-142b-43cf-afcf-4a0bfbc361b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.868365] env[62405]: DEBUG nova.policy [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ca334510b4445a23dc2fb38215590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a1645e38674042828c78155974f95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1863.991810] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947690, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.014486] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947691, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.147197] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.212399] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ae3f71-f436-4b65-a596-ee91beb45d51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.222542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b266de-4b46-4c84-8e6a-641f279e09d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.261944] env[62405]: WARNING nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. ignoring it [ 1864.262143] env[62405]: WARNING nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. 
ignoring it [ 1864.265213] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Successfully created port: af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1864.267790] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d62949-0a39-4636-9742-762fbdef70ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.279395] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fb7f90-6905-4efc-a911-f111c0e844dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.285667] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.297463] env[62405]: DEBUG nova.compute.provider_tree [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1864.306193] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1864.397517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "a1a84837-deef-4ffc-8a47-4891bfc2c87a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.562s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.482203] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947690, 'name': CreateVM_Task, 'duration_secs': 0.546938} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.482274] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1864.483634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.483634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.483634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1864.483884] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ddfade-8f1a-479f-b3e8-ce0d90cfdb96 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.489244] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1864.489244] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52141989-eaf1-c34e-94c8-2e275f38c911" [ 1864.489244] env[62405]: _type = "Task" [ 1864.489244] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.496902] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52141989-eaf1-c34e-94c8-2e275f38c911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.512462] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947691, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523028} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.512983] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 171910d2-02b8-4219-ae75-5cecccea1de3/171910d2-02b8-4219-ae75-5cecccea1de3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1864.513256] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1864.513507] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdfc7686-62d7-4ba2-ba2a-0d6d5daee963 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.520573] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1864.520573] env[62405]: value = "task-1947692" [ 1864.520573] env[62405]: _type = "Task" [ 1864.520573] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.529240] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947692, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.689519] env[62405]: DEBUG nova.network.neutron [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Updated VIF entry in instance network info cache for port f58e9a5c-89b1-4aff-8825-ba44d10e8d46. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.689519] env[62405]: DEBUG nova.network.neutron [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Updating instance_info_cache with network_info: [{"id": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "address": "fa:16:3e:b2:01:84", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf58e9a5c-89", "ovs_interfaceid": "f58e9a5c-89b1-4aff-8825-ba44d10e8d46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.800506] env[62405]: DEBUG nova.scheduler.client.report [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1864.943593] env[62405]: DEBUG nova.compute.manager [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-plugged-bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1864.944119] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.944119] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1864.944345] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.944565] env[62405]: DEBUG nova.compute.manager [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] No waiting events found dispatching network-vif-plugged-bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1864.944683] env[62405]: WARNING nova.compute.manager [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received unexpected event network-vif-plugged-bfef94b3-682e-48fb-8149-02040e229cfb for instance with vm_state active and task_state None. [ 1864.944963] env[62405]: DEBUG nova.compute.manager [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-changed-bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1864.945171] env[62405]: DEBUG nova.compute.manager [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing instance network info cache due to event network-changed-bfef94b3-682e-48fb-8149-02040e229cfb. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1864.945392] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.002220] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52141989-eaf1-c34e-94c8-2e275f38c911, 'name': SearchDatastore_Task, 'duration_secs': 0.009772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.002220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.002220] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1865.002220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.002220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.002220] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1865.002220] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae782f8e-e517-482b-88dd-4ff3c77a2dfa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.012722] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1865.012896] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1865.013995] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-448c889f-71c6-4a6e-bd79-0de6a08de6fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.019292] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1865.019292] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbb6af-e200-df8c-864f-71f750d15a8b" [ 1865.019292] env[62405]: _type = "Task" [ 1865.019292] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.030394] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbb6af-e200-df8c-864f-71f750d15a8b, 'name': SearchDatastore_Task, 'duration_secs': 0.008243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.033686] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067851} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.033956] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6857bc1-3180-4ae9-a6c6-446aac3dca17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.037071] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1865.037071] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7598f034-00c7-4355-813f-85772aec0247 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.044022] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1865.044022] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bb7b42-37ee-61cc-57ce-ec3fa9e6ce1e" [ 1865.044022] env[62405]: _type = "Task" [ 1865.044022] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.064071] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 171910d2-02b8-4219-ae75-5cecccea1de3/171910d2-02b8-4219-ae75-5cecccea1de3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1865.070177] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f012010-1bca-4f85-aa2e-07f8995295aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.094275] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bb7b42-37ee-61cc-57ce-ec3fa9e6ce1e, 'name': SearchDatastore_Task, 'duration_secs': 0.008929} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.095331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.095626] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 86378df0-a658-427d-aca5-de25f84eb28b/86378df0-a658-427d-aca5-de25f84eb28b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1865.095909] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1865.095909] env[62405]: value = "task-1947693" [ 1865.095909] env[62405]: _type = "Task" [ 1865.095909] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.096172] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f1cff87-ec50-4ee3-8e93-09a40469db61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.106358] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947693, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.107605] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1865.107605] env[62405]: value = "task-1947694" [ 1865.107605] env[62405]: _type = "Task" [ 1865.107605] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.115331] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.191972] env[62405]: DEBUG oslo_concurrency.lockutils [req-493145c5-0686-4d34-b2c5-cfa7788beaba req-5786ac97-8a6d-45cf-9a0a-0ea7d9655282 service nova] Releasing lock "refresh_cache-86378df0-a658-427d-aca5-de25f84eb28b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.216883] env[62405]: DEBUG nova.network.neutron [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a466989b-10e1-492c-a30a-33ba96b092ca", "address": "fa:16:3e:f8:73:cc", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa466989b-10", "ovs_interfaceid": "a466989b-10e1-492c-a30a-33ba96b092ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfef94b3-682e-48fb-8149-02040e229cfb", "address": "fa:16:3e:96:ab:a9", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfef94b3-68", "ovs_interfaceid": "bfef94b3-682e-48fb-8149-02040e229cfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.306553] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.311424] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.698s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.312963] env[62405]: INFO nova.compute.claims [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1865.318537] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1865.350506] env[62405]: INFO nova.scheduler.client.report [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted allocations for instance d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d [ 1865.353779] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1865.353779] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1865.353935] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1865.356025] env[62405]: DEBUG 
nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1865.356025] env[62405]: DEBUG nova.virt.hardware [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1865.356939] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba73e8d-16f0-4313-a9a2-a5211deca71b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.371303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c08847-228e-4c37-9dcb-c849f6039b28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.609938] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947693, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.619558] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947694, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.721028] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.721624] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.721794] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.722114] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.722300] env[62405]: DEBUG nova.network.neutron [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Refreshing network info cache for port bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1865.724392] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f6b404-ff15-46c3-a0e1-0c8bbf827fa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.743155] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1865.743433] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1865.743569] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 
tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1865.743886] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1865.744187] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1865.744378] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1865.744592] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1865.745100] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1865.745100] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1865.745100] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1865.745424] env[62405]: DEBUG nova.virt.hardware [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1865.752216] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfiguring VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1865.753344] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48816867-de55-4b41-ae83-73a91b6699bc {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.773850] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1865.773850] env[62405]: value = "task-1947695" [ 1865.773850] env[62405]: _type = "Task" [ 1865.773850] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.784666] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947695, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.866442] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f4454d09-a9d0-41fe-b7a9-31e8732a8ffa tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.652s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.873022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.611s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.873022] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-071e317a-785a-4691-ae2d-83f3f3d2c45b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.888461] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ed2a89-ee35-4ef3-acc2-d53fc14d2307 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.112019] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947693, 'name': ReconfigVM_Task, 'duration_secs': 0.80959} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.112019] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 171910d2-02b8-4219-ae75-5cecccea1de3/171910d2-02b8-4219-ae75-5cecccea1de3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1866.112019] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d65139b-0c8d-43cc-be3a-f2ce5ea32c46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.120727] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735491} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.122269] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 86378df0-a658-427d-aca5-de25f84eb28b/86378df0-a658-427d-aca5-de25f84eb28b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1866.122660] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1866.126044] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1866.126044] env[62405]: value = "task-1947696" [ 1866.126044] env[62405]: _type = "Task" [ 1866.126044] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.126044] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f19b3b3-4912-4105-927f-709b44049b21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.142203] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947696, 'name': Rename_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.142791] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1866.142791] env[62405]: value = "task-1947697" [ 1866.142791] env[62405]: _type = "Task" [ 1866.142791] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.153330] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.169640] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Successfully updated port: af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1866.188781] env[62405]: DEBUG nova.compute.manager [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Received event network-vif-plugged-af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1866.189061] env[62405]: DEBUG oslo_concurrency.lockutils [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.189308] env[62405]: DEBUG oslo_concurrency.lockutils [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.189577] env[62405]: DEBUG oslo_concurrency.lockutils [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.189723] env[62405]: DEBUG nova.compute.manager [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] No waiting events found dispatching network-vif-plugged-af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1866.190399] env[62405]: WARNING nova.compute.manager [req-ecfbccf0-2271-453b-b10e-c947c82bbedf req-d53db0bc-90bc-4267-a7fd-e485f4b417ba service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Received unexpected event network-vif-plugged-af199d5b-90da-4443-ac9d-e8d6bf721a80 for instance with 
vm_state building and task_state spawning. [ 1866.284342] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.422972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.617088] env[62405]: DEBUG nova.network.neutron [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updated VIF entry in instance network info cache for port bfef94b3-682e-48fb-8149-02040e229cfb. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1866.617737] env[62405]: DEBUG nova.network.neutron [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a466989b-10e1-492c-a30a-33ba96b092ca", "address": "fa:16:3e:f8:73:cc", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": 
"nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa466989b-10", "ovs_interfaceid": "a466989b-10e1-492c-a30a-33ba96b092ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfef94b3-682e-48fb-8149-02040e229cfb", "address": "fa:16:3e:96:ab:a9", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfef94b3-68", "ovs_interfaceid": "bfef94b3-682e-48fb-8149-02040e229cfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.638705] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947696, 'name': Rename_Task, 'duration_secs': 0.149349} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.639132] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1866.639435] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b4ed0e3-70e7-4604-844a-ca53936ed427 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.654264] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1866.654264] env[62405]: value = "task-1947698" [ 1866.654264] env[62405]: _type = "Task" [ 1866.654264] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.665160] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071266} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.665970] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1866.666325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69eb3ec3-7679-4d12-b2ec-0d9a0c349a5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.676237] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.676237] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.676237] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1866.677271] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947698, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.705799] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 86378df0-a658-427d-aca5-de25f84eb28b/86378df0-a658-427d-aca5-de25f84eb28b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1866.708203] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-064e215d-c810-42a1-8d79-410bc2d581b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.728237] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6649572-ebad-4acc-afcd-c7b183df429d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.739771] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a79a79-b974-496e-82c8-757a189a444d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.743056] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1866.743056] env[62405]: value = "task-1947699" [ 1866.743056] env[62405]: _type = "Task" [ 1866.743056] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.772223] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebd1779-96e8-48f2-a851-9b174dc1cb6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.777976] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.785108] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020c79b5-3546-46c9-9541-38cb194a9c5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.791566] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947695, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.800873] env[62405]: DEBUG nova.compute.provider_tree [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.125020] env[62405]: DEBUG oslo_concurrency.lockutils [req-3b1e3f98-e98a-4730-9869-c896d30f2486 req-4ecea4c3-7ec0-4b8e-90b1-47420c83bd84 service nova] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.168865] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947698, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.250449] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1867.258214] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.287476] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947695, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.305790] env[62405]: DEBUG nova.scheduler.client.report [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1867.668391] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947698, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.725502] env[62405]: DEBUG nova.network.neutron [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.754589] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947699, 'name': ReconfigVM_Task, 'duration_secs': 0.924972} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.755016] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 86378df0-a658-427d-aca5-de25f84eb28b/86378df0-a658-427d-aca5-de25f84eb28b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1867.755721] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b472c52-629c-45af-af92-3bd43bfbe719 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.762976] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1867.762976] env[62405]: value = "task-1947700" [ 1867.762976] env[62405]: _type = "Task" [ 1867.762976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.771786] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947700, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.787143] env[62405]: DEBUG oslo_vmware.api [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947695, 'name': ReconfigVM_Task, 'duration_secs': 1.61616} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.787668] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.787888] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfigured VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1867.812087] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.812619] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1867.815397] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.881s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.815760] env[62405]: DEBUG nova.objects.instance [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lazy-loading 'resources' on Instance uuid 742c8d94-48d1-4408-91dc-98f25661aa8d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.169189] env[62405]: DEBUG oslo_vmware.api [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947698, 'name': PowerOnVM_Task, 'duration_secs': 1.139682} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.170482] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1868.170482] env[62405]: INFO nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Took 18.41 seconds to spawn the instance on the hypervisor. [ 1868.170482] env[62405]: DEBUG nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1868.170672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da07e1d-328a-460a-b91f-0f11ba64e0cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.227973] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.228380] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Instance network_info: |[{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1868.228817] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 
81aebf11-5d80-4a86-b232-3ecc5f3892c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:08:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af199d5b-90da-4443-ac9d-e8d6bf721a80', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1868.237199] env[62405]: DEBUG oslo.service.loopingcall [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.237759] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1868.237998] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fb88ebc-5296-4ef2-aa98-ed305e0d37ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.263834] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1868.263834] env[62405]: value = "task-1947701" [ 1868.263834] env[62405]: _type = "Task" [ 1868.263834] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.275141] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947700, 'name': Rename_Task, 'duration_secs': 0.132683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.280181] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1868.280787] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947701, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.280787] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f2e6ae9-0112-40be-beb0-b2c9ef66edcb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.287134] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1868.287134] env[62405]: value = "task-1947702" [ 1868.287134] env[62405]: _type = "Task" [ 1868.287134] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.297065] env[62405]: DEBUG oslo_concurrency.lockutils [None req-968eeebd-6ba3-4286-a9d8-e474031bb253 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-bfef94b3-682e-48fb-8149-02040e229cfb" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.347s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.298191] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.328101] env[62405]: DEBUG nova.compute.utils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.330381] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Not allocating networking since 'none' was specified. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1868.692643] env[62405]: INFO nova.compute.manager [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Took 39.64 seconds to build instance. 
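The PowerOnVM_Task / Rename_Task / ReconfigVM_Task entries above all follow the same oslo.vmware pattern that the log's wait_for_task and _poll_task trailers point at: a vSphere method is invoked through the API session, the returned task object is handed to wait_for_task, and the task is polled ("progress is N%") until it completes. A minimal sketch of that pattern follows; the connection values and the constructor keyword names are assumptions for illustration, not taken from this log.

    from oslo_vmware import api as vmware_api

    # Hypothetical connection values; in Nova these come from nova.conf [vmware].
    # Keyword names are from memory of the oslo.vmware API and should be checked
    # against the installed version.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',
        'administrator@vsphere.local',
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5,
    )

    # vm_ref would be a managed object reference resolved elsewhere (omitted here).
    vm_ref = None
    # invoke_api() issues the SOAP call; wait_for_task() is the poller that emits
    # the "_poll_task ... progress is N%" lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    result = session.wait_for_task(task)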
[ 1868.712217] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f896fc-d4db-4cfd-8c1e-3f7b62f42655 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.725226] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a2626e-bdd5-492b-98f1-173d370087aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.735094] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.735787] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.768441] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1868.775026] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb52ecb-d987-49d8-8ff1-42887220873c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.791513] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd116d57-a1c9-4a35-a8f3-6dd84697219c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.796637] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947701, 'name': CreateVM_Task, 'duration_secs': 0.464793} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.801297] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1868.803749] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.804163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.805038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1868.816792] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4bea834-5de7-4962-a325-fb7e19ef322e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.819710] env[62405]: DEBUG nova.compute.provider_tree [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.825767] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947702, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.830291] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1868.830291] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e65927-9614-0d61-a63f-df77fbbd66e1" [ 1868.830291] env[62405]: _type = "Task" [ 1868.830291] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.835872] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1868.843444] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e65927-9614-0d61-a63f-df77fbbd66e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.074423] env[62405]: DEBUG nova.compute.manager [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Received event network-changed-af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1869.074709] env[62405]: DEBUG nova.compute.manager [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Refreshing instance network info cache due to event network-changed-af199d5b-90da-4443-ac9d-e8d6bf721a80. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1869.075015] env[62405]: DEBUG oslo_concurrency.lockutils [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] Acquiring lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.075475] env[62405]: DEBUG oslo_concurrency.lockutils [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] Acquired lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.075729] env[62405]: DEBUG nova.network.neutron [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Refreshing network info cache for port af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1869.193395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-07be954f-37a2-476c-b06c-a38471ffeb1d tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.161s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.193395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.933s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.193395] env[62405]: INFO nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1869.193395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.304890] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.311148] env[62405]: DEBUG oslo_vmware.api [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947702, 'name': PowerOnVM_Task, 'duration_secs': 0.596796} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.311521] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1869.311644] env[62405]: INFO nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Took 8.30 seconds to spawn the instance on the hypervisor. 
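The lockutils entries here ("acquired ... waited 15.933s", "released ... held 0.000s") are oslo.concurrency named internal locks, which Nova takes around refresh_cache-<uuid>, compute_resources and the per-instance build/terminate paths. A minimal sketch of the same primitive, with a hypothetical placeholder for the work done under the lock:

    from oslo_concurrency import lockutils

    def rebuild_network_info_cache():
        """Hypothetical placeholder for the work done while the lock is held."""
        pass

    # Context-manager form: corresponds to the Acquiring / Acquired / Releasing
    # trio logged above for "refresh_cache-<instance uuid>".
    with lockutils.lock('refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2'):
        rebuild_network_info_cache()

    # Decorator form, the pattern behind the "compute_resources" lock held by
    # ResourceTracker.instance_claim in the entries above.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass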
[ 1869.311840] env[62405]: DEBUG nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1869.313981] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55ce04e-6aff-48e5-8c63-3473e9df466f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.330488] env[62405]: DEBUG nova.scheduler.client.report [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1869.349083] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e65927-9614-0d61-a63f-df77fbbd66e1, 'name': SearchDatastore_Task, 'duration_secs': 0.013042} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.349828] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.350099] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1869.350340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.350623] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.350868] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.351812] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a95ffa7-ce0b-4a87-8dd9-b4a7905dc129 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.362647] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.363758] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1869.363758] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79973f05-9373-4e94-9bc5-cbeb70d10faa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.369671] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1869.369671] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520868c4-af63-efb5-4647-3b3ec3961bd3" [ 1869.369671] env[62405]: _type = "Task" [ 1869.369671] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.378348] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520868c4-af63-efb5-4647-3b3ec3961bd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.831826] env[62405]: INFO nova.compute.manager [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Took 35.39 seconds to build instance. 
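The SearchDatastore_Task / MakeDirectory / CopyVirtualDisk_Task sequence above is the image-cache branch of _fetch_image_if_missing: take the per-image lock, probe devstack-image-cache_base for the cached VMDK, create the cache directory if needed, then copy the cached disk into the instance folder. A condensed sketch of that flow; the helper functions below are hypothetical stand-ins for the ds_util/vm_util helpers named in the log, not their real signatures.

    from oslo_concurrency import lockutils

    IMAGE_ID = 'e6bba7a8-c2de-41dc-871a-3859bba5f4f9'  # image id seen in the log above
    CACHE_VMDK = f'[datastore1] devstack-image-cache_base/{IMAGE_ID}/{IMAGE_ID}.vmdk'

    def datastore_file_exists(path):
        """Hypothetical stand-in for the HostDatastoreBrowser SearchDatastore_Task probe."""
        return True

    def fetch_image_to_cache(dst):
        """Hypothetical stand-in for the Glance download taken on a cache miss."""
        pass

    def copy_virtual_disk(src, dst):
        """Hypothetical stand-in for VirtualDiskManager.CopyVirtualDisk_Task."""
        pass

    def prepare_root_disk(instance_uuid):
        dst = f'[datastore1] {instance_uuid}/{instance_uuid}.vmdk'
        # Per-image lock, mirroring the "[datastore1] devstack-image-cache_base/<id>"
        # lock acquired and released in the entries above.
        with lockutils.lock(f'[datastore1] devstack-image-cache_base/{IMAGE_ID}'):
            if not datastore_file_exists(CACHE_VMDK):
                fetch_image_to_cache(CACHE_VMDK)
        copy_virtual_disk(CACHE_VMDK, dst)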
[ 1869.840024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.842276] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.789s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.846896] env[62405]: INFO nova.compute.claims [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1869.853020] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1869.870571] env[62405]: INFO nova.scheduler.client.report [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Deleted allocations for instance 742c8d94-48d1-4408-91dc-98f25661aa8d [ 1869.883152] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1869.883152] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1869.883152] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1869.883152] env[62405]: DEBUG 
nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1869.884076] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1869.884076] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1869.884076] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1869.884288] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1869.885509] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1869.885509] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1869.885509] env[62405]: DEBUG nova.virt.hardware [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1869.886292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a311f7b-cd2b-4e03-b331-fe0fabd13c33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.898036] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520868c4-af63-efb5-4647-3b3ec3961bd3, 'name': SearchDatastore_Task, 'duration_secs': 0.029459} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.904117] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa5299b3-f282-408e-a3bd-16bd5cad7b6e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.911210] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a389c1-2cce-4ef6-938e-1e17ef1250dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.922887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.923303] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.923662] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.923987] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.924310] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.926935] env[62405]: INFO nova.compute.manager [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Terminating instance [ 1869.934558] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1869.934558] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d5b469-fb6c-05d5-6076-365d5b75e5cc" [ 
1869.934558] env[62405]: _type = "Task" [ 1869.934558] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.949370] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.957329] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Creating folder: Project (bf96abaf6edd42a4a20657d9b67a1fcf). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.962775] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5a2daa4-a4a0-45e0-ab25-28975d58768d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.971476] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d5b469-fb6c-05d5-6076-365d5b75e5cc, 'name': SearchDatastore_Task, 'duration_secs': 0.01758} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.971706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.971976] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1869.972270] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3aa3870b-3423-481a-8d87-8c3dc59227b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.975442] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Created folder: Project (bf96abaf6edd42a4a20657d9b67a1fcf) in parent group-v401284. [ 1869.975654] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Creating folder: Instances. Parent ref: group-v401516. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.976220] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e169cba3-b89b-43f7-87bd-13156ffd58eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.981320] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1869.981320] env[62405]: value = "task-1947704" [ 1869.981320] env[62405]: _type = "Task" [ 1869.981320] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.985612] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Created folder: Instances in parent group-v401516. [ 1869.985844] env[62405]: DEBUG oslo.service.loopingcall [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.986414] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1869.986637] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e695fbf-5bb4-45cf-8c06-865347e51712 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.001605] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947704, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.006912] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1870.006912] env[62405]: value = "task-1947706" [ 1870.006912] env[62405]: _type = "Task" [ 1870.006912] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.014800] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947706, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.029937] env[62405]: DEBUG nova.network.neutron [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updated VIF entry in instance network info cache for port af199d5b-90da-4443-ac9d-e8d6bf721a80. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1870.030332] env[62405]: DEBUG nova.network.neutron [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.334513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174323ed-b724-48a5-8176-e8f7719d6f0e tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.899s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.382369] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7153bd6b-4d5b-44a6-9739-370310d8a0f4 tempest-AttachInterfacesUnderV243Test-1269993824 tempest-AttachInterfacesUnderV243Test-1269993824-project-member] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.297s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.382891] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.125s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.383701] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43669fdc-e266-4706-96d7-decbdf2003cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.394907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334b043c-743b-452e-a5ae-1b2766fd7056 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.435313] env[62405]: DEBUG nova.compute.manager [None 
req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1870.435542] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.439265] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7b6842-866b-4959-83b7-52a5d48012b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.442146] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.442738] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.451172] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.451172] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-062873fc-c96c-41be-b00a-40ac12fc7d1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.459072] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1870.459072] env[62405]: value = "task-1947707" [ 1870.459072] env[62405]: _type = "Task" [ 1870.459072] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.469762] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947707, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.498406] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947704, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.516725] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947706, 'name': CreateVM_Task, 'duration_secs': 0.399537} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.516917] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1870.517524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.517711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.518440] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.518724] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3db3e561-99b1-4a06-ab38-089251038783 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.526401] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1870.526401] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245f8d0-ddde-9ada-c72e-0d24969d48bc" [ 1870.526401] env[62405]: _type = "Task" [ 1870.526401] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.532593] env[62405]: DEBUG oslo_concurrency.lockutils [req-2fbb469d-4ac7-4233-88b4-239b728c70ba req-4162368b-1b3b-48f4-81fc-b69da991d0b6 service nova] Releasing lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.536890] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245f8d0-ddde-9ada-c72e-0d24969d48bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.941008] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "742c8d94-48d1-4408-91dc-98f25661aa8d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.945452] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1870.971783] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947707, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.991312] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.733505} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.991626] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1870.991842] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1870.992110] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-918e59f3-a9ae-4040-b63f-335c54f302d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.000112] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1871.000112] env[62405]: value = "task-1947708" [ 1871.000112] env[62405]: _type = "Task" [ 1871.000112] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.010160] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947708, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.041774] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5245f8d0-ddde-9ada-c72e-0d24969d48bc, 'name': SearchDatastore_Task, 'duration_secs': 0.070691} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.042214] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.042492] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1871.042741] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.042891] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.043087] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1871.043418] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c82fc41-c930-4bbc-9c1b-8b0a4f9c514e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.056616] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1871.056808] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1871.057672] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56b3654f-d77c-4833-9339-c4457f0bf5e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.066025] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1871.066025] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52870f05-1872-16e6-bb33-f84fe87f48d9" [ 1871.066025] env[62405]: _type = "Task" [ 1871.066025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.074296] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.074637] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.080024] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52870f05-1872-16e6-bb33-f84fe87f48d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.095253] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-a466989b-10e1-492c-a30a-33ba96b092ca" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.095388] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-a466989b-10e1-492c-a30a-33ba96b092ca" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.138742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.138980] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.199691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca2def4-9faf-4dbc-abb3-82350db16a05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.207703] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a0c4e7-8993-4c01-bbe2-6611f2671bd9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.237881] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a442b49-9084-460f-be3f-d0d533aab1ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.245097] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df66847-8675-4383-b649-303d06fd7495 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.259312] env[62405]: DEBUG nova.compute.provider_tree [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.470094] env[62405]: DEBUG oslo_vmware.api [None 
req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947707, 'name': PowerOffVM_Task, 'duration_secs': 0.62201} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.471081] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.471364] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1871.471560] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1871.471804] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7fb65e3-ff3c-4528-a4e5-01c933c10026 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.509730] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947708, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069252} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.510084] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1871.510846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d913fe-60f7-4c6a-80ac-dd8d81aa7766 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.533016] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1871.533801] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-970b3e09-5059-4f70-8273-0271867433df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.553369] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1871.553369] env[62405]: value = "task-1947710" [ 1871.553369] env[62405]: _type = "Task" [ 1871.553369] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.562089] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947710, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.576149] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52870f05-1872-16e6-bb33-f84fe87f48d9, 'name': SearchDatastore_Task, 'duration_secs': 0.013326} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.577152] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c03635d6-b1cc-4f18-b4eb-83e9d0513571 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.582923] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1871.582923] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52924415-9e6d-b1dc-cc60-7620c24d3430" [ 1871.582923] env[62405]: _type = "Task" [ 1871.582923] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.586709] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1871.594235] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52924415-9e6d-b1dc-cc60-7620c24d3430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.597837] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.598216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.599078] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac7a5dc-8a8d-4369-9921-477a1b3cda94 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.616876] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82b8b4c-ef9c-4073-b34b-04ab46172c03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.646970] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfiguring VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1871.648163] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1871.650728] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce5bc97a-0741-45eb-86cb-d76f43ba57d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.673634] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1871.673634] env[62405]: value = "task-1947711" [ 1871.673634] env[62405]: _type = "Task" [ 1871.673634] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.678911] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1871.679158] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1871.679386] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleting the datastore file [datastore1] 171910d2-02b8-4219-ae75-5cecccea1de3 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.680070] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf4a0f11-abe7-4c4c-ba5a-adfba1730753 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.685527] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.690707] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for the task: (returnval){ [ 1871.690707] env[62405]: value = "task-1947712" [ 1871.690707] env[62405]: _type = "Task" [ 1871.690707] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.699485] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.762462] env[62405]: DEBUG nova.scheduler.client.report [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1872.063105] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947710, 'name': ReconfigVM_Task, 'duration_secs': 0.275168} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.063405] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1872.064058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5cfa943f-a5f1-4732-955a-9d6354367a29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.071181] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1872.071181] env[62405]: value = "task-1947713" [ 1872.071181] env[62405]: _type = "Task" [ 1872.071181] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.079232] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947713, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.094180] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52924415-9e6d-b1dc-cc60-7620c24d3430, 'name': SearchDatastore_Task, 'duration_secs': 0.012645} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.094180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.094588] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1872.097929] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-280595fd-50cd-4589-afae-6147540e18bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.108158] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1872.108158] env[62405]: value = "task-1947714" [ 1872.108158] env[62405]: _type = "Task" [ 1872.108158] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.117125] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.118799] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.187668] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.192869] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.205031] env[62405]: DEBUG oslo_vmware.api [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Task: {'id': task-1947712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435346} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.205031] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1872.205031] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1872.205031] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1872.205031] env[62405]: INFO nova.compute.manager [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Took 1.77 seconds to destroy the instance on the hypervisor. [ 1872.205031] env[62405]: DEBUG oslo.service.loopingcall [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.205031] env[62405]: DEBUG nova.compute.manager [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1872.205031] env[62405]: DEBUG nova.network.neutron [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1872.269636] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.270149] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1872.273243] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.581s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.273473] env[62405]: DEBUG nova.objects.instance [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lazy-loading 'resources' on Instance uuid 79548471-56f8-410c-a664-d2242541cd2a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1872.489838] env[62405]: DEBUG nova.compute.manager [req-397bfc2b-2de0-43f2-91d8-ff39bbb71f4c req-605afc98-16fb-4bca-b43f-0249dbccb353 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-vif-deleted-0a8cd850-d9e3-4640-98b7-44b386609be6 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1872.490127] env[62405]: INFO nova.compute.manager [req-397bfc2b-2de0-43f2-91d8-ff39bbb71f4c req-605afc98-16fb-4bca-b43f-0249dbccb353 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Neutron deleted interface 0a8cd850-d9e3-4640-98b7-44b386609be6; detaching it from the instance and deleting it from the info cache [ 1872.490331] env[62405]: DEBUG nova.network.neutron [req-397bfc2b-2de0-43f2-91d8-ff39bbb71f4c req-605afc98-16fb-4bca-b43f-0249dbccb353 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updating instance_info_cache with network_info: [{"id": "2b494e96-08e7-4608-a930-5d9da520c342", "address": "fa:16:3e:45:c3:cf", "network": {"id": "daafb1e7-9436-4cb4-8856-6378822f38ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1626101583", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e0d6dfea772e432289163b14e9e341c1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b494e96-08", "ovs_interfaceid": "2b494e96-08e7-4608-a930-5d9da520c342", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.585029] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947713, 'name': Rename_Task, 'duration_secs': 0.164826} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.585339] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1872.585586] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b44e0ea6-e92c-49e5-b96a-cde8faca2907 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.594606] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1872.594606] env[62405]: value = "task-1947715" [ 1872.594606] env[62405]: _type = "Task" [ 1872.594606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.609654] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.627039] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947714, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.690576] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.778863] env[62405]: DEBUG nova.compute.utils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1872.784837] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1872.784837] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1872.888341] env[62405]: DEBUG nova.policy [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6472af0b6f6240f297f7f137cde41929', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bb1da47e8b1a400fab7817d9e6b282ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1872.998147] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07aafc9a-0412-49a0-a0ca-16e0988d1aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.008125] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f9785f-3778-42f9-adeb-19d7eaa63b99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.046133] env[62405]: DEBUG nova.compute.manager [req-397bfc2b-2de0-43f2-91d8-ff39bbb71f4c req-605afc98-16fb-4bca-b43f-0249dbccb353 service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Detach interface failed, port_id=0a8cd850-d9e3-4640-98b7-44b386609be6, reason: Instance 171910d2-02b8-4219-ae75-5cecccea1de3 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1873.104908] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947715, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.120149] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884571} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.120223] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1873.120487] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1873.120781] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aafbb8e-b251-4b3b-abb7-4e56baae5f64 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.132278] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1873.132278] env[62405]: value = "task-1947716" [ 1873.132278] env[62405]: _type = "Task" [ 1873.132278] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.141069] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947716, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.147261] env[62405]: DEBUG nova.network.neutron [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.189582] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.206943] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7221cd-e6c5-496a-b6cf-225d267875d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.214883] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41efa87-1613-4ccb-be0d-8f4aeed81455 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.247528] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae75e96-3ecc-41dd-8137-e69b18845daf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.255283] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6b941d-1e70-456b-b4a9-11889565327d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.269051] env[62405]: DEBUG nova.compute.provider_tree [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.287778] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1873.350390] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Successfully created port: 55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1873.605696] env[62405]: DEBUG oslo_vmware.api [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947715, 'name': PowerOnVM_Task, 'duration_secs': 0.657071} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.606689] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1873.606689] env[62405]: INFO nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 1873.606846] env[62405]: DEBUG nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1873.607631] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95372e3-50ca-4e55-9122-0b4196da2c6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.642750] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07768} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.643585] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1873.643849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a54771-204b-46a5-97fa-f760b2191e0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.655189] env[62405]: INFO nova.compute.manager [-] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Took 1.45 seconds to deallocate network for instance. [ 1873.663978] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.666727] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cae942a3-ada1-44c3-b2ca-86140811ebd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.693312] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.694661] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1873.694661] env[62405]: value = "task-1947717" [ 1873.694661] env[62405]: _type = "Task" [ 1873.694661] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.703756] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947717, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.771759] env[62405]: DEBUG nova.scheduler.client.report [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1874.128584] env[62405]: INFO nova.compute.manager [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Took 36.19 seconds to build instance. [ 1874.188883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.194742] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.202698] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947717, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.279768] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.283483] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.240s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.285095] env[62405]: INFO nova.compute.claims [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1874.298203] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1874.302610] env[62405]: INFO nova.scheduler.client.report [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted allocations for instance 79548471-56f8-410c-a664-d2242541cd2a [ 1874.338657] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1874.339152] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1874.339152] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1874.339459] 
env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1874.339518] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1874.339633] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1874.340094] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1874.340094] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1874.340210] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1874.340354] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1874.340757] env[62405]: DEBUG nova.virt.hardware [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1874.341695] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca66a0d7-01fa-4d1a-ae64-1de8961a97d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.353977] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010b05f5-655b-407b-ba10-82e18034726f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.630467] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4eb17867-a80f-4d5a-af1b-ffbdf0589cd0 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.707s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.635756] env[62405]: DEBUG nova.compute.manager [req-3cb3c135-558f-4c89-923f-d0449005dab8 req-128cd44b-628a-435b-97b0-30eb9c32140c service nova] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Received event network-vif-deleted-2b494e96-08e7-4608-a930-5d9da520c342 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1874.695827] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.704114] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947717, 'name': ReconfigVM_Task, 'duration_secs': 0.899434} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.704356] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.704928] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a14df99c-2887-462b-a8cd-cba6a1418d98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.711245] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1874.711245] env[62405]: value = "task-1947718" [ 1874.711245] env[62405]: _type = "Task" [ 1874.711245] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.719095] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947718, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.815057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8fb3f872-37de-4222-9954-c11fb8576251 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "79548471-56f8-410c-a664-d2242541cd2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.640s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.815057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "79548471-56f8-410c-a664-d2242541cd2a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 21.557s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.815057] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-911454bc-77cd-4684-8f84-9c64f91d2d0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.830165] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084d763c-3264-497f-8f5e-43a805f0c67b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.044707] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Successfully updated port: 55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1875.196609] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.222059] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947718, 'name': Rename_Task, 'duration_secs': 0.16927} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.222428] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1875.222725] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb77d547-98b5-4f73-aec9-4d1445b00f66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.229821] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1875.229821] env[62405]: value = "task-1947719" [ 1875.229821] env[62405]: _type = "Task" [ 1875.229821] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.238335] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.373319] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "79548471-56f8-410c-a664-d2242541cd2a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.556049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.556342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.556585] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1875.604414] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddb9716-1abc-4a38-b682-cc73562537a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.612130] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f63406-1c37-454e-a03e-3b2b677708c8 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.646361] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f23651-ce74-4b76-9d1f-05be9ed9fe12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.653970] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4af971-db3e-424f-bb5c-b72259695e01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.672123] env[62405]: DEBUG nova.compute.provider_tree [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.696784] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.740971] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947719, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.095734] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.175289] env[62405]: DEBUG nova.scheduler.client.report [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1876.196580] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.249626] env[62405]: DEBUG oslo_vmware.api [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947719, 'name': PowerOnVM_Task, 'duration_secs': 0.621024} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.249626] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1876.249626] env[62405]: INFO nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Took 6.40 seconds to spawn the instance on the hypervisor. [ 1876.249888] env[62405]: DEBUG nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1876.250645] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c411609-ca93-4a65-8e23-f483dfdc604b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.283947] env[62405]: DEBUG nova.network.neutron [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Updating instance_info_cache with network_info: [{"id": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "address": "fa:16:3e:df:2e:5b", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55e7f388-c4", "ovs_interfaceid": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.381748] env[62405]: DEBUG nova.compute.manager [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1876.681319] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.681851] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1876.685854] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.888s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.686045] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.686205] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1876.686501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.007s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.686709] env[62405]: DEBUG nova.objects.instance [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'resources' on Instance uuid 271cec64-e7b4-4a1b-a7d6-f3fd60086209 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.688601] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73dfec5-aec2-4983-b1d1-123d660ffabf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.705633] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc3118c-08e2-43e7-8dd7-247e8f234f1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.708406] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.710392] env[62405]: DEBUG nova.compute.manager [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Received event network-vif-plugged-55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1876.710932] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Acquiring lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.710932] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.710932] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.710932] env[62405]: DEBUG nova.compute.manager [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] No waiting events found dispatching network-vif-plugged-55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1876.714493] env[62405]: WARNING nova.compute.manager [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Received unexpected event network-vif-plugged-55e7f388-c46a-48fe-b363-c49bbbe7f6b4 for instance with vm_state building and task_state spawning. [ 1876.714493] env[62405]: DEBUG nova.compute.manager [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Received event network-changed-55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1876.714493] env[62405]: DEBUG nova.compute.manager [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Refreshing instance network info cache due to event network-changed-55e7f388-c46a-48fe-b363-c49bbbe7f6b4. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1876.714493] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Acquiring lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.726756] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa58e2e-d1ce-47b3-b5f1-681f4f778248 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.737803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb75ec7-3cc8-4d69-ab19-a95c425f6d17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.781220] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179551MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1876.781273] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.783597] env[62405]: INFO nova.compute.manager [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Took 33.18 seconds to build instance. 
[ 1876.787776] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.787776] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Instance network_info: |[{"id": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "address": "fa:16:3e:df:2e:5b", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55e7f388-c4", "ovs_interfaceid": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1876.787776] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Acquired lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.787776] env[62405]: DEBUG nova.network.neutron [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Refreshing network info cache for port 55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1876.788061] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:2e:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55e7f388-c46a-48fe-b363-c49bbbe7f6b4', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1876.800541] env[62405]: DEBUG oslo.service.loopingcall [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1876.800541] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1876.800541] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2064dab-1aab-4a46-8d4c-55726b3fca55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.823718] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1876.823718] env[62405]: value = "task-1947720" [ 1876.823718] env[62405]: _type = "Task" [ 1876.823718] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.831765] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947720, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.907716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.190120] env[62405]: DEBUG nova.compute.utils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1877.191601] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1877.191808] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1877.206029] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.247012] env[62405]: DEBUG nova.policy [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1877.289773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-174a367c-d208-4d4f-a2c0-0d092ce927ed tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.067s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.333628] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947720, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.542420] env[62405]: DEBUG nova.network.neutron [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Updated VIF entry in instance network info cache for port 55e7f388-c46a-48fe-b363-c49bbbe7f6b4. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1877.542685] env[62405]: DEBUG nova.network.neutron [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Updating instance_info_cache with network_info: [{"id": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "address": "fa:16:3e:df:2e:5b", "network": {"id": "ac0e1447-1c61-4770-8006-3a99edc76f93", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1648941742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bb1da47e8b1a400fab7817d9e6b282ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55e7f388-c4", "ovs_interfaceid": "55e7f388-c46a-48fe-b363-c49bbbe7f6b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.603303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55900394-67e4-4276-9e18-bf19f9a31cbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.612329] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e52cf5-3893-4661-9c91-1ddc39b6eafd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.650388] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Successfully created port: 906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1877.652864] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03e898b-29ac-46af-aeeb-817f109041b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.661527] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b878d136-4c33-432e-bd72-f87113739d42 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.681176] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1877.681442] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.683367] env[62405]: DEBUG nova.compute.provider_tree [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.694959] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1877.707900] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.791805] env[62405]: INFO nova.compute.manager [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Rebuilding instance [ 1877.834784] env[62405]: DEBUG nova.compute.manager [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1877.835765] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468a2478-9943-46aa-b2a3-67a0748bf8cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.843288] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947720, 'name': CreateVM_Task, 'duration_secs': 0.579611} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.843814] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1877.844540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.844714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.845052] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1877.845325] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed7be85a-be30-4d2c-be11-7f4e603f4ede {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.853925] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1877.853925] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a81a6-3cc9-767a-5f55-be37ecbeadd2" [ 1877.853925] env[62405]: _type = "Task" [ 1877.853925] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.862017] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a81a6-3cc9-767a-5f55-be37ecbeadd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.047154] env[62405]: DEBUG oslo_concurrency.lockutils [req-522dfa40-e696-479e-a2e2-4102945637b4 req-a6cda1ca-4c43-433b-9688-4d992061a939 service nova] Releasing lock "refresh_cache-59fe34ab-c01d-4083-8bcd-ad6b4133a66f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.185735] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1878.189074] env[62405]: DEBUG nova.scheduler.client.report [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1878.209017] env[62405]: DEBUG oslo_vmware.api [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947711, 'name': ReconfigVM_Task, 'duration_secs': 6.352616} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.209494] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.209745] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfigured VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1878.367430] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a81a6-3cc9-767a-5f55-be37ecbeadd2, 'name': SearchDatastore_Task, 'duration_secs': 0.019502} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.368883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.368977] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.369337] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.369584] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.369839] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.370702] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83cbb1f0-9ba9-478a-9c13-690cd2483b34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.384072] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.384072] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.384072] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-154d3be7-5ce3-4f24-a1c4-b8406e20d4bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.387820] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1878.387820] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ca74fd-79ac-d307-f67f-73c545277495" [ 1878.387820] env[62405]: _type = "Task" [ 1878.387820] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.396211] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ca74fd-79ac-d307-f67f-73c545277495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.506520] env[62405]: DEBUG nova.compute.manager [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-deleted-a466989b-10e1-492c-a30a-33ba96b092ca {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1878.506780] env[62405]: INFO nova.compute.manager [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Neutron deleted interface a466989b-10e1-492c-a30a-33ba96b092ca; detaching it from the instance and deleting it from the info cache [ 1878.508130] env[62405]: DEBUG nova.network.neutron [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bfef94b3-682e-48fb-8149-02040e229cfb", 
"address": "fa:16:3e:96:ab:a9", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfef94b3-68", "ovs_interfaceid": "bfef94b3-682e-48fb-8149-02040e229cfb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.696551] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.700787] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.817s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.701112] env[62405]: DEBUG nova.objects.instance [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lazy-loading 'resources' on Instance uuid 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1878.710680] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1878.715681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.719587] env[62405]: INFO nova.scheduler.client.report [None req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted allocations for instance 271cec64-e7b4-4a1b-a7d6-f3fd60086209 [ 1878.740822] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1878.741071] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.741237] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1878.741427] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.741580] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1878.741731] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1878.741944] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 
tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1878.742123] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1878.742304] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1878.742466] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1878.742646] env[62405]: DEBUG nova.virt.hardware [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1878.743522] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9978c05-9d8d-43c2-b49c-8ef2f11e8231 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.751860] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c31433-2662-421c-9231-0bb08d548bb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.856592] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.856993] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcd622ca-f320-4336-b35c-0dc302d6040d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.864518] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1878.864518] env[62405]: value = "task-1947721" [ 1878.864518] env[62405]: _type = "Task" [ 1878.864518] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.872799] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947721, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.899282] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ca74fd-79ac-d307-f67f-73c545277495, 'name': SearchDatastore_Task, 'duration_secs': 0.009785} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.900074] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9521fa1-fe27-4b82-a450-4e060f0e288f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.905262] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1878.905262] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b00a23-251e-d851-ca8b-310ff25b1779" [ 1878.905262] env[62405]: _type = "Task" [ 1878.905262] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.912990] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b00a23-251e-d851-ca8b-310ff25b1779, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.011404] env[62405]: DEBUG oslo_concurrency.lockutils [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.011593] env[62405]: DEBUG oslo_concurrency.lockutils [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] Acquired lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.012500] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2881e2-e546-4348-8bb9-5577a7e0ece3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.031420] env[62405]: DEBUG oslo_concurrency.lockutils [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] Releasing lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.031696] env[62405]: WARNING nova.compute.manager [req-3888035f-04d2-45ca-80b0-b61fb695e00b req-79cb87f5-8284-4c3f-8be5-72eda3315660 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Detach interface failed, port_id=a466989b-10e1-492c-a30a-33ba96b092ca, reason: No device with interface-id a466989b-10e1-492c-a30a-33ba96b092ca exists on VM: nova.exception.NotFound: No device with interface-id a466989b-10e1-492c-a30a-33ba96b092ca exists on VM [ 1879.235474] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-84ba1ec7-6780-46b6-bc3d-a93321a3e6b4 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.463s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.239994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.980s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.240205] env[62405]: INFO nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] During sync_power_state the instance has a pending task (deleting). Skip. [ 1879.240390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "271cec64-e7b4-4a1b-a7d6-f3fd60086209" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.375089] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947721, 'name': PowerOffVM_Task, 'duration_secs': 0.169681} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.377357] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1879.377662] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1879.378581] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4ac41f-cef7-4514-8930-94ced3bd2239 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.384966] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1879.385211] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a790a2ff-dc64-4e72-a827-19827e2d9546 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.400792] env[62405]: DEBUG nova.compute.manager [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d 
service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-deleted-bfef94b3-682e-48fb-8149-02040e229cfb {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1879.400792] env[62405]: INFO nova.compute.manager [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Neutron deleted interface bfef94b3-682e-48fb-8149-02040e229cfb; detaching it from the instance and deleting it from the info cache [ 1879.400792] env[62405]: DEBUG nova.network.neutron [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.415305] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1879.415654] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1879.415891] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Deleting the datastore file [datastore1] 8f133517-cff2-40c7-8333-a9116163313a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1879.417420] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e3a51fa-7160-4c10-9e26-2683cb29c5c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.425930] env[62405]: 
DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b00a23-251e-d851-ca8b-310ff25b1779, 'name': SearchDatastore_Task, 'duration_secs': 0.008632} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.426661] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.426957] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 59fe34ab-c01d-4083-8bcd-ad6b4133a66f/59fe34ab-c01d-4083-8bcd-ad6b4133a66f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.428190] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1e046b2-043a-4039-bedc-9046a225047d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.431469] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1879.431469] env[62405]: value = "task-1947723" [ 1879.431469] env[62405]: _type = "Task" [ 1879.431469] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.440198] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1879.440198] env[62405]: value = "task-1947724" [ 1879.440198] env[62405]: _type = "Task" [ 1879.440198] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.452407] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947723, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.460838] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947724, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.474112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.477266] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.477266] env[62405]: DEBUG nova.network.neutron [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.540726] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39985cc4-caef-4c2b-8651-46b461949d4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.549267] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044992bc-3ea1-42bf-bbeb-f805b6ac078f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.580235] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f10e6d-f103-4561-ac90-02d26ffd5959 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.587714] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eef3d37-ec89-483c-9aa6-0d085a3623c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.606110] env[62405]: DEBUG nova.compute.provider_tree [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1879.690541] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Successfully updated port: 906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1879.903505] env[62405]: DEBUG oslo_concurrency.lockutils [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.903921] env[62405]: DEBUG oslo_concurrency.lockutils [req-b40b67ac-704c-4a13-a59f-6f727870160a 
req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Acquired lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.904927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b08657-ece8-4956-a8c3-a79c7609094b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.923983] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0953f989-0650-4e31-b5a2-8f97e2235bd0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.945254] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.951306] env[62405]: DEBUG nova.virt.vmwareapi.vmops [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfiguring VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1879.957468] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05b1c3f0-4a6d-4f05-88b5-f8ab976ba19e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.986345] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947724, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499674} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.986609] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386999} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.988121] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 59fe34ab-c01d-4083-8bcd-ad6b4133a66f/59fe34ab-c01d-4083-8bcd-ad6b4133a66f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1879.988452] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1879.988860] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.989112] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.989402] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.992083] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Waiting for the task: (returnval){ [ 1879.992083] env[62405]: value = "task-1947725" [ 1879.992083] env[62405]: _type = "Task" [ 1879.992083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.992947] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c8d1c97-60c5-4648-9897-c4dd4fef78e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.004235] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.006337] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1880.006337] env[62405]: value = "task-1947726" [ 1880.006337] env[62405]: _type = "Task" [ 1880.006337] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.015912] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947726, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.109767] env[62405]: DEBUG nova.scheduler.client.report [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1880.195670] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.195808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.195970] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.319656] env[62405]: DEBUG nova.network.neutron [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [{"id": "09308517-a17c-48d3-b01f-fed73b19adfd", "address": "fa:16:3e:d4:17:23", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09308517-a1", "ovs_interfaceid": "09308517-a17c-48d3-b01f-fed73b19adfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.506020] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.515424] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071774} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.515542] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.516401] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be00e354-5b8b-4d10-9626-fc844fb76abe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.539223] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 59fe34ab-c01d-4083-8bcd-ad6b4133a66f/59fe34ab-c01d-4083-8bcd-ad6b4133a66f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.540215] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb96548e-4d47-43c7-89b0-5c68bf9bfca2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.560377] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1880.560377] env[62405]: value = "task-1947727" [ 1880.560377] env[62405]: _type = "Task" [ 1880.560377] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.569099] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947727, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.615057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.914s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.617643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.110s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.617924] env[62405]: DEBUG nova.objects.instance [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lazy-loading 'resources' on Instance uuid 7256b956-e41a-40ec-a687-a129a8bafcb6 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1880.639759] env[62405]: INFO nova.scheduler.client.report [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Deleted allocations for instance 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44 [ 1880.728794] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1880.824947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.919387] env[62405]: DEBUG nova.network.neutron [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Updating instance_info_cache with network_info: [{"id": "906bdab0-cfcb-43b1-8d01-63587d844b97", "address": "fa:16:3e:b6:78:b5", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906bdab0-cf", "ovs_interfaceid": "906bdab0-cfcb-43b1-8d01-63587d844b97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.010813] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.029019] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1881.029333] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.029508] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1881.030064] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.030064] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1881.030197] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1881.030359] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1881.030610] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1881.030862] env[62405]: DEBUG nova.virt.hardware [None 
req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1881.031114] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1881.031413] env[62405]: DEBUG nova.virt.hardware [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1881.032393] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16bae6d-6641-4410-95cc-5d68c0fc0e23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.041147] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cea757-d6a7-4c87-9be1-147febdc6c77 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.055827] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1881.061599] env[62405]: DEBUG oslo.service.loopingcall [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.062271] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1881.065354] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2789bd0e-0983-432f-aae0-bee1aa7f2e9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.086246] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947727, 'name': ReconfigVM_Task, 'duration_secs': 0.282583} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.087656] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 59fe34ab-c01d-4083-8bcd-ad6b4133a66f/59fe34ab-c01d-4083-8bcd-ad6b4133a66f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1881.088295] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1881.088295] env[62405]: value = "task-1947728" [ 1881.088295] env[62405]: _type = "Task" [ 1881.088295] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.088514] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cadc0a6-33cf-4832-aab4-f1ccdd2b09ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.099284] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947728, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.100835] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1881.100835] env[62405]: value = "task-1947729" [ 1881.100835] env[62405]: _type = "Task" [ 1881.100835] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.109592] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947729, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.150513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3aa5f3d-4f83-45ee-95dc-ac56a1f4e8ba tempest-ServersWithSpecificFlavorTestJSON-1869880636 tempest-ServersWithSpecificFlavorTestJSON-1869880636-project-member] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.850s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.151601] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 27.896s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.151843] env[62405]: INFO nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1881.152216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "34ec55c6-1a7a-4ffa-8efd-9eedd7495d44" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.329733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3340a102-3259-439a-9dfa-ed4e27484546 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-9d97bf1d-6830-48b1-831b-bf2b52188f32-a466989b-10e1-492c-a30a-33ba96b092ca" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.234s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.424518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.424835] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Instance network_info: |[{"id": "906bdab0-cfcb-43b1-8d01-63587d844b97", "address": "fa:16:3e:b6:78:b5", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906bdab0-cf", "ovs_interfaceid": "906bdab0-cfcb-43b1-8d01-63587d844b97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1881.425779] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:78:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '906bdab0-cfcb-43b1-8d01-63587d844b97', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1881.436120] env[62405]: DEBUG oslo.service.loopingcall [None 
req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.436369] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1881.436599] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3e76a21-dd6f-4a6b-bfa3-4a5f75c1057d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.458583] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1881.458583] env[62405]: value = "task-1947730" [ 1881.458583] env[62405]: _type = "Task" [ 1881.458583] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.468778] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947730, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.509240] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.512541] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e900d5e5-eda9-4e36-9763-152127bfb6ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.519771] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7dc02b6-b513-4593-9509-a4f7b096d9a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.525226] env[62405]: DEBUG nova.compute.manager [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Received event network-vif-plugged-906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1881.525475] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Acquiring lock "08d7be6c-0557-46af-ae8d-e1c68e878cae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.525764] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.525911] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Lock 
"08d7be6c-0557-46af-ae8d-e1c68e878cae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.526027] env[62405]: DEBUG nova.compute.manager [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] No waiting events found dispatching network-vif-plugged-906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1881.526198] env[62405]: WARNING nova.compute.manager [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Received unexpected event network-vif-plugged-906bdab0-cfcb-43b1-8d01-63587d844b97 for instance with vm_state building and task_state spawning. [ 1881.526351] env[62405]: DEBUG nova.compute.manager [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Received event network-changed-906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1881.526504] env[62405]: DEBUG nova.compute.manager [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Refreshing instance network info cache due to event network-changed-906bdab0-cfcb-43b1-8d01-63587d844b97. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1881.526688] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Acquiring lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.526823] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Acquired lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.526977] env[62405]: DEBUG nova.network.neutron [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Refreshing network info cache for port 906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1881.555472] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ac142c-1479-4ebf-a5be-a173ca7332cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.565985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15ffd43-8442-4900-b7d1-c1871deb4d7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.580331] env[62405]: DEBUG nova.compute.provider_tree [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed in ProviderTree for provider: 
7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1881.599143] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947728, 'name': CreateVM_Task, 'duration_secs': 0.311763} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.599261] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.599673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.599857] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.600382] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.600434] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6469809b-4b23-4747-9a8f-cac78966d416 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.607759] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1881.607759] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52507f2c-a9aa-aa8f-3248-4d49acfcf40e" [ 1881.607759] env[62405]: _type = "Task" [ 1881.607759] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.611058] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947729, 'name': Rename_Task, 'duration_secs': 0.188767} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.613939] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1881.614221] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e77a5584-0699-475a-af3c-c5b247401744 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.621167] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52507f2c-a9aa-aa8f-3248-4d49acfcf40e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.623127] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1881.623127] env[62405]: value = "task-1947731" [ 1881.623127] env[62405]: _type = "Task" [ 1881.623127] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.630233] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947731, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.969074] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947730, 'name': CreateVM_Task, 'duration_secs': 0.41515} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.969404] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.970094] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.009700] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.083579] env[62405]: DEBUG nova.scheduler.client.report [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1882.122077] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52507f2c-a9aa-aa8f-3248-4d49acfcf40e, 'name': SearchDatastore_Task, 'duration_secs': 0.013579} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.122529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.122826] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.123124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.123348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.123576] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.123875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired 
lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.124208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1882.124494] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa52d03d-068f-420a-88a0-618702737667 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.126336] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-169ebe52-60ca-4478-9904-19a2d295969e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.139078] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1882.139078] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f8267e-34c9-2224-9c68-7631b057b7c2" [ 1882.139078] env[62405]: _type = "Task" [ 1882.139078] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.142173] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947731, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.147241] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.147241] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1882.147438] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2de25bf0-0f29-4c8a-b7ab-eeba693717c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.155678] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1882.155678] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ddd0ae-d390-9671-1ea0-7046a5101333" [ 1882.155678] env[62405]: _type = "Task" [ 1882.155678] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.159590] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f8267e-34c9-2224-9c68-7631b057b7c2, 'name': SearchDatastore_Task, 'duration_secs': 0.011671} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.162414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.162657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.162868] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.167880] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ddd0ae-d390-9671-1ea0-7046a5101333, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.323715] env[62405]: DEBUG nova.network.neutron [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Updated VIF entry in instance network info cache for port 906bdab0-cfcb-43b1-8d01-63587d844b97. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1882.324154] env[62405]: DEBUG nova.network.neutron [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Updating instance_info_cache with network_info: [{"id": "906bdab0-cfcb-43b1-8d01-63587d844b97", "address": "fa:16:3e:b6:78:b5", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906bdab0-cf", "ovs_interfaceid": "906bdab0-cfcb-43b1-8d01-63587d844b97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.512334] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.591177] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.592740] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.446s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.592984] env[62405]: DEBUG nova.objects.instance [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lazy-loading 'resources' on Instance uuid 65cd4af4-30cf-4435-8f32-501db450905f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1882.611874] env[62405]: INFO nova.scheduler.client.report [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Deleted allocations for instance 7256b956-e41a-40ec-a687-a129a8bafcb6 [ 1882.634558] env[62405]: DEBUG oslo_vmware.api [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947731, 'name': PowerOnVM_Task, 'duration_secs': 0.53813} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.635273] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1882.635273] env[62405]: INFO nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Took 8.34 seconds to spawn the instance on the hypervisor. 
[ 1882.635273] env[62405]: DEBUG nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1882.635968] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439daabe-1c57-4f83-80b2-8affbfac3f93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.671166] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ddd0ae-d390-9671-1ea0-7046a5101333, 'name': SearchDatastore_Task, 'duration_secs': 0.013516} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.675017] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23a3717c-1534-4cdf-aa6b-7c67d288f2ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.679764] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1882.679764] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d8e69-e06a-656b-de97-d58f4a7cf2fd" [ 1882.679764] env[62405]: _type = "Task" [ 1882.679764] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.689496] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d8e69-e06a-656b-de97-d58f4a7cf2fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.830696] env[62405]: DEBUG oslo_concurrency.lockutils [req-8a732a1f-347c-4ac6-883e-62994e27074f req-ba603297-8a99-4ed4-9be2-69a710ab95a7 service nova] Releasing lock "refresh_cache-08d7be6c-0557-46af-ae8d-e1c68e878cae" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.012341] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.119216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1d8dc834-5176-453e-b8fa-4c3e5238a3f6 tempest-ListServerFiltersTestJSON-486576907 tempest-ListServerFiltersTestJSON-486576907-project-member] Lock "7256b956-e41a-40ec-a687-a129a8bafcb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.126s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.155548] env[62405]: INFO nova.compute.manager [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Took 36.12 seconds to build instance. [ 1883.189660] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d8e69-e06a-656b-de97-d58f4a7cf2fd, 'name': SearchDatastore_Task, 'duration_secs': 0.032436} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.192203] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.192477] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1883.193372] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.193503] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1883.193750] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7da3a378-877a-47ce-b045-22930ec04a3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.195669] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c147344-683f-4cb6-8646-98443538a28d {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.203483] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1883.203483] env[62405]: value = "task-1947732" [ 1883.203483] env[62405]: _type = "Task" [ 1883.203483] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.207100] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1883.207287] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1883.208477] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6a3204d-3bc0-4672-a73c-4130023ade0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.214407] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.221132] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1883.221132] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521740a5-7204-4792-d027-dc43abff6913" [ 1883.221132] env[62405]: _type = "Task" [ 1883.221132] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.228241] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521740a5-7204-4792-d027-dc43abff6913, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.431659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26ea6ee-aea6-4fe6-b37b-e8e8ce2633f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.439585] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbca8a1-0174-4206-97b3-b1baa143e7a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.471558] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853ee3ec-69ec-41b6-a348-0b8523719b24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.478934] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd90bc5-9a72-4063-91c0-4686c625f0a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.492358] env[62405]: DEBUG nova.compute.provider_tree [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1883.511412] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.657953] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bf5fbeee-54e7-4270-8309-84e1125402f3 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.628s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.717356] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947732, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.731406] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521740a5-7204-4792-d027-dc43abff6913, 'name': SearchDatastore_Task, 'duration_secs': 0.041164} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.732494] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89b55568-11e4-4e16-a94f-37dd1d1edae4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.738842] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1883.738842] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c63cc9-2bad-46b7-bbd6-57afa4dd9349" [ 1883.738842] env[62405]: _type = "Task" [ 1883.738842] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.748724] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c63cc9-2bad-46b7-bbd6-57afa4dd9349, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.997308] env[62405]: DEBUG nova.scheduler.client.report [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1884.018663] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.215134] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947732, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704481} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.215420] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1884.215634] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1884.215890] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e84c0c8-af2d-458a-9eb0-fbd064a07ef3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.222079] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1884.222079] env[62405]: value = "task-1947733" [ 1884.222079] env[62405]: _type = "Task" [ 1884.222079] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.229407] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947733, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.251617] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c63cc9-2bad-46b7-bbd6-57afa4dd9349, 'name': SearchDatastore_Task, 'duration_secs': 0.055266} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.251886] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.252326] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 08d7be6c-0557-46af-ae8d-e1c68e878cae/08d7be6c-0557-46af-ae8d-e1c68e878cae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1884.253333] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41db02db-bd59-4944-a757-1f47d142fdab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.260927] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1884.260927] env[62405]: value = "task-1947734" [ 1884.260927] env[62405]: _type = "Task" [ 1884.260927] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.269812] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947734, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.503940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.507081] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.222s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.509284] env[62405]: INFO nova.compute.claims [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1884.522972] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.547669] env[62405]: INFO nova.scheduler.client.report [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Deleted allocations for instance 65cd4af4-30cf-4435-8f32-501db450905f [ 1884.732602] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947733, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077807} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.733022] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1884.733673] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7596ec64-372d-40e1-89fc-10c4a020557f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.755326] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1884.755326] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f81a2741-8368-4604-89e0-2ab9f6424759 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.779718] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49208} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.781067] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 08d7be6c-0557-46af-ae8d-e1c68e878cae/08d7be6c-0557-46af-ae8d-e1c68e878cae.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1884.781291] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1884.781601] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1884.781601] env[62405]: value = "task-1947735" [ 1884.781601] env[62405]: _type = "Task" [ 1884.781601] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.781789] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c811bd4f-d602-436c-8088-bb4fa171ca5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.791586] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947735, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.794533] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1884.794533] env[62405]: value = "task-1947736" [ 1884.794533] env[62405]: _type = "Task" [ 1884.794533] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.804228] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947736, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.024435] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.058947] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bada42d0-8459-4144-ae42-917e90422d8b tempest-FloatingIPsAssociationNegativeTestJSON-995837819 tempest-FloatingIPsAssociationNegativeTestJSON-995837819-project-member] Lock "65cd4af4-30cf-4435-8f32-501db450905f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.424s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.295977] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947735, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.304006] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074164} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.304310] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1885.305083] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc4e842-f0ed-4d36-948d-223c0330a9c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.337237] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 08d7be6c-0557-46af-ae8d-e1c68e878cae/08d7be6c-0557-46af-ae8d-e1c68e878cae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1885.337237] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a39bbb07-ccf3-4064-8960-debd3f4b8f41 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.365020] env[62405]: DEBUG nova.compute.manager [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.365020] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1885.365020] env[62405]: value = "task-1947737" [ 1885.365020] env[62405]: _type = "Task" [ 1885.365020] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.365020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492d8dcc-feb8-408c-8172-c639e199fe45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.378586] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947737, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.521480] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.799844] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947735, 'name': ReconfigVM_Task, 'duration_secs': 0.695488} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.801394] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1885.801394] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75d86d81-e150-4e7c-8b77-1355164f0281 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.807202] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1885.807202] env[62405]: value = "task-1947738" [ 1885.807202] env[62405]: _type = "Task" [ 1885.807202] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.816630] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947738, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.833298] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021870a-0bdc-4d19-8156-509e523f4432 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.841533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e84a85f-2930-445e-b948-4d317942d078 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.885321] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c5cdce-0d68-4da5-8c0f-944af5ede080 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.889323] env[62405]: INFO nova.compute.manager [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] instance snapshotting [ 1885.895219] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e12f5fa-9d63-4141-80e3-a6e814410861 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.900504] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947737, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.917837] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2972776-48ce-4aef-ba0f-bef78c3e8102 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.922371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed67fca3-fc27-4441-ba58-2b305f89235d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.939326] env[62405]: DEBUG nova.compute.provider_tree [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1886.021737] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.317057] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947738, 'name': Rename_Task, 'duration_secs': 0.297031} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.317570] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1886.317647] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f906b810-2e06-4a97-96bf-ffdee0b84cc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.323962] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1886.323962] env[62405]: value = "task-1947739" [ 1886.323962] env[62405]: _type = "Task" [ 1886.323962] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.331768] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.392378] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947737, 'name': ReconfigVM_Task, 'duration_secs': 0.922996} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.392695] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 08d7be6c-0557-46af-ae8d-e1c68e878cae/08d7be6c-0557-46af-ae8d-e1c68e878cae.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1886.393438] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4defe12e-4ac0-4a63-9bd0-02cbba547312 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.401744] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1886.401744] env[62405]: value = "task-1947740" [ 1886.401744] env[62405]: _type = "Task" [ 1886.401744] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.409022] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947740, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.454443] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1886.454443] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-60184741-0f5b-4293-941b-bfd02fa1bf58 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.461720] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1886.461720] env[62405]: value = "task-1947741" [ 1886.461720] env[62405]: _type = "Task" [ 1886.461720] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.473022] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947741, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.475862] env[62405]: ERROR nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [req-05977f0e-18f4-4660-a7b8-6cb6363138a9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-05977f0e-18f4-4660-a7b8-6cb6363138a9"}]} [ 1886.500051] env[62405]: DEBUG nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1886.514991] env[62405]: DEBUG nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1886.515276] env[62405]: DEBUG nova.compute.provider_tree [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1886.522521] env[62405]: DEBUG oslo_vmware.api [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Task: {'id': task-1947725, 'name': ReconfigVM_Task, 'duration_secs': 6.057527} completed successfully. 
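The ERROR/409 above is Placement's optimistic concurrency control: the inventory update carried a stale resource provider generation, so Placement rejects it with code placement.concurrent_update, and the report client refreshes the provider's inventories, aggregates and traits before retrying (the retry succeeds further down, bumping the generation from 134 to 135). A minimal sketch of that refresh-and-retry pattern follows, assuming hypothetical `update_inventory` and `refresh_provider` callables rather than Nova's real report-client methods.

```python
# Simplified sketch of retry-on-generation-conflict, in the spirit of
# nova.scheduler.client.report. The two callables are hypothetical
# stand-ins; real code talks to the Placement HTTP API.

class GenerationConflict(Exception):
    """Raised when the provider generation sent with the update is stale."""


def set_inventory_with_retry(provider, inventory, update_inventory,
                             refresh_provider, max_attempts=3):
    """Try to write inventory, refreshing the cached provider on conflict."""
    for _attempt in range(max_attempts):
        try:
            # The update carries the cached provider generation; Placement
            # rejects it with HTTP 409 / placement.concurrent_update if
            # another writer bumped the generation first.
            return update_inventory(provider, inventory)
        except GenerationConflict:
            # Someone else changed the provider; reload its generation (and,
            # as the log shows, inventories/aggregates/traits) and retry.
            provider = refresh_provider(provider.uuid)
    raise RuntimeError("inventory update kept conflicting; giving up")
```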
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.522908] env[62405]: DEBUG oslo_concurrency.lockutils [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] Releasing lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.523146] env[62405]: DEBUG nova.virt.vmwareapi.vmops [req-b40b67ac-704c-4a13-a59f-6f727870160a req-d282b945-d4ba-4c91-a8ed-39fb384adc6d service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Reconfigured VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1886.523511] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.578s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.523915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.524150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.525112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.529521] env[62405]: INFO nova.compute.manager [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Terminating instance [ 1886.547193] env[62405]: DEBUG nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1886.572745] env[62405]: DEBUG nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing trait associations for resource 
provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1886.837780] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947739, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.932450] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12242e4e-fac4-40d5-944e-7ff40958cbf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.956867] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947740, 'name': Rename_Task, 'duration_secs': 0.221188} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.957705] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1886.960573] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dec1a2d-01ac-4e7f-8ccc-e1044ff670fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.964490] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a888d93d-5e12-4142-bcfc-e780fc7b1647 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.977450] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947741, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.016941] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1887.016941] env[62405]: value = "task-1947742" [ 1887.016941] env[62405]: _type = "Task" [ 1887.016941] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.017786] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c05bebf-9c7f-4b77-8555-6e4e2a048a1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.032166] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a27d83-2b4e-4edc-9979-948dfba4aaa1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.038671] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947742, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.038671] env[62405]: DEBUG nova.compute.manager [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1887.038671] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1887.038671] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d98e450-9327-45da-b51d-1aa5d52c3939 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.050214] env[62405]: DEBUG nova.compute.provider_tree [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1887.054138] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1887.054704] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ef2efd7-f4f9-4b4f-8e4d-22409db795f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.062129] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 
tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1887.062129] env[62405]: value = "task-1947743" [ 1887.062129] env[62405]: _type = "Task" [ 1887.062129] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.070705] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.335974] env[62405]: DEBUG oslo_vmware.api [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947739, 'name': PowerOnVM_Task, 'duration_secs': 0.656886} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.336431] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1887.336513] env[62405]: DEBUG nova.compute.manager [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1887.337542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf99121-0357-4d3b-9ca9-106a3b858901 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.476578] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947741, 'name': CreateSnapshot_Task, 'duration_secs': 0.64041} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.476827] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1887.477649] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85dbfad-3bc9-45b0-9842-255925663f94 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.534036] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947742, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.573023] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947743, 'name': PowerOffVM_Task, 'duration_secs': 0.225325} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.573023] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1887.573023] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1887.573023] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81400755-bd32-41ff-9a86-665a0d30377e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.587039] env[62405]: DEBUG nova.scheduler.client.report [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1887.587346] env[62405]: DEBUG nova.compute.provider_tree [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 134 to 135 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1887.587585] env[62405]: DEBUG nova.compute.provider_tree [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1887.756063] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba 
tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1887.756121] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1887.756335] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleting the datastore file [datastore1] 9d97bf1d-6830-48b1-831b-bf2b52188f32 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1887.756582] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8208132-9d18-416d-a3e3-6ed2d95c6427 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.767248] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1887.767248] env[62405]: value = "task-1947745" [ 1887.767248] env[62405]: _type = "Task" [ 1887.767248] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.780482] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.859504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.001919] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1888.002290] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1de6c1e4-4aa8-41dd-a386-e0ddc757f42b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.012684] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1888.012684] env[62405]: value = "task-1947746" [ 1888.012684] env[62405]: _type = "Task" [ 1888.012684] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.023068] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947746, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.032025] env[62405]: DEBUG oslo_vmware.api [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947742, 'name': PowerOnVM_Task, 'duration_secs': 1.015619} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.032764] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1888.032873] env[62405]: INFO nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1888.033070] env[62405]: DEBUG nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1888.033982] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31cf1df-ccd6-4cd6-b47f-74ea5f2fb97a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.095305] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.588s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.095922] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1888.098874] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.794s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.100351] env[62405]: INFO nova.compute.claims [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1888.278434] env[62405]: DEBUG oslo_vmware.api [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304334} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.278583] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1888.279217] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1888.279217] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1888.279217] env[62405]: INFO nova.compute.manager [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1888.279494] env[62405]: DEBUG oslo.service.loopingcall [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1888.279731] env[62405]: DEBUG nova.compute.manager [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1888.279875] env[62405]: DEBUG nova.network.neutron [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1888.346069] env[62405]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port bfef94b3-682e-48fb-8149-02040e229cfb could not be found.", "detail": ""}} {{(pid=62405) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1888.346362] env[62405]: DEBUG nova.network.neutron [-] Unable to show port bfef94b3-682e-48fb-8149-02040e229cfb as it no longer exists. {{(pid=62405) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1888.523801] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947746, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.557197] env[62405]: INFO nova.compute.manager [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Took 36.53 seconds to build instance. [ 1888.606026] env[62405]: DEBUG nova.compute.utils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1888.611030] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Allocating IP information in the background. 
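The PortNotFound entry above is tolerated on purpose: while deallocating networking for the deleted instance, a port that Neutron has already removed is logged and skipped rather than failing the teardown. A small generic sketch of that "already gone" pattern; `show_port` and `unbind_port` here are hypothetical stand-ins, not the Neutron client API as Nova calls it.

```python
# Generic best-effort teardown in the spirit of the PortNotFound entry above:
# a resource that no longer exists is logged and skipped, not treated as an
# error. Both callables are hypothetical stand-ins.

class PortNotFound(Exception):
    pass


def cleanup_ports(show_port, unbind_port, port_ids, log=print):
    """Clean up ports, skipping any that the backend already deleted."""
    for port_id in port_ids:
        try:
            port = show_port(port_id)
        except PortNotFound:
            # Mirrors the log: "Unable to show port ... as it no longer exists."
            log(f"Unable to show port {port_id} as it no longer exists.")
            continue
        unbind_port(port)
```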
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1888.611030] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1888.653979] env[62405]: DEBUG nova.policy [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ad5e220132245168b59ff3df599b974', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3b50cc219314108945bfc8b2c21849a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1888.866513] env[62405]: INFO nova.compute.manager [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Rebuilding instance [ 1888.935560] env[62405]: DEBUG nova.compute.manager [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1888.936535] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cc1527-cfe8-476a-9170-ce7614086fef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.000411] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Successfully created port: 2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1889.027529] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947746, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.062083] env[62405]: DEBUG oslo_concurrency.lockutils [None req-aaf55f6a-42fd-4537-9ca0-b62d12c69a21 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.044s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.112560] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1889.459775] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db400258-bb64-4a37-a0c2-31d5738bd109 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.468805] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe6dbe6-f843-44ca-82ac-6432e9a765f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.508056] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7cf8d1-796e-4cb4-a65b-51ababd6ede9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.512435] env[62405]: DEBUG nova.compute.manager [req-af45d668-87b7-469d-810b-66d3a01ee22e req-f02ebff8-8e51-4d65-a5ff-8dba94c7dec6 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Received event network-vif-deleted-09308517-a17c-48d3-b01f-fed73b19adfd {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1889.512435] env[62405]: INFO nova.compute.manager [req-af45d668-87b7-469d-810b-66d3a01ee22e req-f02ebff8-8e51-4d65-a5ff-8dba94c7dec6 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Neutron deleted interface 09308517-a17c-48d3-b01f-fed73b19adfd; detaching it from the instance and deleting it from the info cache [ 1889.512527] env[62405]: DEBUG nova.network.neutron [req-af45d668-87b7-469d-810b-66d3a01ee22e req-f02ebff8-8e51-4d65-a5ff-8dba94c7dec6 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.523922] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2247035c-f2b2-4013-a79e-de73e9879d84 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.533144] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947746, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.544264] env[62405]: DEBUG nova.compute.provider_tree [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1889.658924] env[62405]: DEBUG nova.network.neutron [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.959234] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1889.959234] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b2945a9-3723-4615-96fb-3cb7e01a7da1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.967490] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1889.967490] env[62405]: value = "task-1947747" [ 1889.967490] env[62405]: _type = "Task" [ 1889.967490] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.977886] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.016019] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b570876e-9c3e-4ed9-889c-afda364998d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.027569] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947746, 'name': CloneVM_Task, 'duration_secs': 1.954923} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.028835] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Created linked-clone VM from snapshot [ 1890.029832] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3ab93a-0f2d-4330-8b73-7f9d4dfd5bcc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.035575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff200a3-e331-4ef9-9e87-6304935ed6a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.055279] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Uploading image b8f122c6-183d-49dc-b088-8f71ee372deb {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1890.072229] env[62405]: DEBUG nova.compute.manager [req-af45d668-87b7-469d-810b-66d3a01ee22e req-f02ebff8-8e51-4d65-a5ff-8dba94c7dec6 service nova] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Detach interface failed, port_id=09308517-a17c-48d3-b01f-fed73b19adfd, reason: Instance 9d97bf1d-6830-48b1-831b-bf2b52188f32 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1890.073447] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1890.073712] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d7432377-0ee6-4ea4-aed3-033949cb2bd2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.080528] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1890.080528] env[62405]: value = "task-1947748" [ 1890.080528] env[62405]: _type = "Task" [ 1890.080528] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.089036] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947748, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.108145] env[62405]: DEBUG nova.scheduler.client.report [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1890.108419] env[62405]: DEBUG nova.compute.provider_tree [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 135 to 136 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1890.108675] env[62405]: DEBUG nova.compute.provider_tree [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1890.134777] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1890.138302] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62c8c96-a351-4d4e-933d-919287c9ae5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.146397] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Suspending the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1890.146665] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-64ea323c-262f-4f62-9c25-e853a276ffb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.154476] env[62405]: DEBUG oslo_vmware.api [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1890.154476] env[62405]: value = "task-1947749" [ 1890.154476] env[62405]: _type = "Task" [ 1890.154476] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.159800] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1890.160075] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1890.160246] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1890.160433] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1890.160583] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1890.160734] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1890.160944] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1890.161122] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1890.161292] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1890.161457] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1890.161632] env[62405]: DEBUG nova.virt.hardware [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1890.162710] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ebf398-11bc-42a9-940c-6b8ccf804b1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.165598] env[62405]: INFO nova.compute.manager [-] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Took 1.89 seconds to deallocate network for instance. [ 1890.173175] env[62405]: DEBUG oslo_vmware.api [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947749, 'name': SuspendVM_Task} progress is 0%. 
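The nova.virt.hardware lines above enumerate CPU topologies for the 1-vCPU m1.nano flavor: with no flavor or image limits set, every (sockets, cores, threads) split of the vCPU count is considered and only (1, 1, 1) survives. A minimal sketch of that enumeration step, not Nova's exact implementation (which also applies flavor and image preferences before sorting):

from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) splits whose product equals vcpus,
    bounded by the limits; the idea behind the 'Possible topologies' line above."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets > max_sockets or cores > max_cores or threads > max_threads:
            continue
        topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor in the log this yields exactly one topology: [(1, 1, 1)].
print(possible_cpu_topologies(1))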
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.176467] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a5e06-105c-4914-900a-3fc7c89a4d06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.478676] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947747, 'name': PowerOffVM_Task, 'duration_secs': 0.133373} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.479433] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.479813] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1890.480830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3044254-4f11-4b56-90f2-d5cf96e92c9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.489891] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.489891] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e3842d3-aaad-4b42-98ce-dd0763bb8cf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.516283] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.516770] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.516964] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Deleting the datastore file [datastore1] 8f133517-cff2-40c7-8333-a9116163313a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.517237] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-5c21a675-3022-4be6-bea6-03d5dd29258e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.523777] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1890.523777] env[62405]: value = "task-1947751" [ 1890.523777] env[62405]: _type = "Task" [ 1890.523777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.532120] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.593829] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947748, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.615328] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.616037] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1890.618851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.148s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.620314] env[62405]: INFO nova.compute.claims [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1890.626172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.626172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.626172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.626172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.626536] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.631818] env[62405]: INFO nova.compute.manager [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Terminating instance [ 1890.667787] env[62405]: DEBUG oslo_vmware.api [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947749, 'name': 
SuspendVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.670072] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Successfully updated port: 2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1890.678398] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.751432] env[62405]: DEBUG nova.compute.manager [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Received event network-vif-plugged-2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1890.751782] env[62405]: DEBUG oslo_concurrency.lockutils [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.753966] env[62405]: DEBUG oslo_concurrency.lockutils [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.754260] env[62405]: DEBUG oslo_concurrency.lockutils [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.754460] env[62405]: DEBUG nova.compute.manager [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] No waiting events found dispatching network-vif-plugged-2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1890.755094] env[62405]: WARNING nova.compute.manager [req-716a9153-9230-4add-9458-0899d90e178f req-d745663f-7968-4ac8-b6b6-8f62e7d26411 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Received unexpected event network-vif-plugged-2026016a-87b1-42ae-a04f-d95c5fb37377 for instance with vm_state building and task_state spawning. [ 1891.034265] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159074} completed successfully. 
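Each VirtualMachine.*_Task invocation above follows the same wait pattern: the API call returns a task reference, the caller logs "Waiting for the task", polls its progress, and finishes on "completed successfully". A simplified sketch of that polling loop, with a hypothetical get_task_info callable standing in for the vSphere session (this is not the oslo.vmware implementation):

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state, mirroring the
    'progress is N%' / 'completed successfully' lines in this log."""
    while True:
        info = get_task_info(task_ref)  # assumed to expose .state, .progress, .error
        if info.state == "running":
            print(f"Task {task_ref} progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print(f"Task {task_ref} completed successfully")
            return info
        raise RuntimeError(f"Task {task_ref} failed: {info.error}")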
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.034586] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.034829] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.035638] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.090783] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947748, 'name': Destroy_Task, 'duration_secs': 0.965045} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.090990] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Destroyed the VM [ 1891.091244] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1891.091491] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ec6f45b8-ab46-4530-9c82-849411d7495e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.097677] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1891.097677] env[62405]: value = "task-1947752" [ 1891.097677] env[62405]: _type = "Task" [ 1891.097677] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.106237] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947752, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.126127] env[62405]: DEBUG nova.compute.utils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1891.127467] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1891.130864] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1891.136261] env[62405]: DEBUG nova.compute.manager [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1891.136467] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1891.137315] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2a6016-c1ab-40b8-a58d-32f7c225a236 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.146854] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1891.147136] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c5d38ae-15d9-421d-a2cb-aa2f421a86f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.155547] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1891.155547] env[62405]: value = "task-1947753" [ 1891.155547] env[62405]: _type = "Task" [ 1891.155547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.167460] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947753, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.170716] env[62405]: DEBUG oslo_vmware.api [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947749, 'name': SuspendVM_Task, 'duration_secs': 0.909512} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.171350] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Suspended the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1891.171350] env[62405]: DEBUG nova.compute.manager [None req-df542e74-4dcd-4225-bb2f-3e6c587f7aff tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1891.171967] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49386bcc-d11d-4f83-96d6-f607d829a7c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.175200] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.175358] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.175522] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1891.178984] env[62405]: DEBUG nova.policy [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '766cbfd2f4944dc5b4bb3c210c4c6a95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a949e89f885745acb15d0afd4893ce68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1891.304694] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 
tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.305139] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.466130] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Successfully created port: 339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1891.490282] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "2c623c00-92f2-4cc4-8503-963c3308d708" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.490613] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.490739] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.490920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.491100] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.493924] env[62405]: INFO nova.compute.manager [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Terminating instance [ 1891.610669] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947752, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.631028] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1891.669644] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947753, 'name': PowerOffVM_Task, 'duration_secs': 0.206314} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.670126] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1891.670126] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1891.670470] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85c2a627-562e-4f72-92de-35f05fe68385 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.721939] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1891.809975] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Starting instance... 
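The oslo_concurrency.lockutils lines above record how long each caller waited to acquire a named lock such as "compute_resources" and how long it was then held (the instance_claim above waited 19.148s, for example). A rough pure-Python illustration of that wait/held accounting around a named lock; this only sketches the logging semantics, it is not oslo.concurrency's implementation:

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    """Serialize callers on a named lock and report wait/hold durations,
    echoing the 'acquired ... waited Xs' / 'released ... held Ys' lines."""
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" released :: held {held:.3f}s')

# Example use:
#   with timed_lock("compute_resources"):
#       claim_resources_for_instance()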
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1891.846595] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1891.846807] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1891.846989] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Deleting the datastore file [datastore1] 4c8c0d2f-d8d3-4422-8a5c-8999636b22be {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.847270] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39a67927-91b8-4f21-b740-6a052ae02f88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.859429] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for the task: (returnval){ [ 1891.859429] env[62405]: value = "task-1947755" [ 1891.859429] env[62405]: _type = "Task" [ 1891.859429] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.871028] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947755, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.968227] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a2d6cf-a700-45ef-9c6e-92016ec5dbee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.977685] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1737d6c6-30a8-4cec-b845-bcd8a6d980e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.012811] env[62405]: DEBUG nova.compute.manager [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1892.013166] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1892.014461] env[62405]: DEBUG nova.network.neutron [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.016283] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb909bb-f357-4104-a443-c222dfb1e960 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.020167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382e14b9-e3ee-4bc1-8c02-a75981457d9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.029598] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1892.032412] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f90922ba-e739-4ec7-b110-e362d545dcfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.036390] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c281e2a-dd9c-4523-9451-0d899e9b4912 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.055695] env[62405]: DEBUG nova.compute.provider_tree [None 
req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.057606] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1892.057606] env[62405]: value = "task-1947756" [ 1892.057606] env[62405]: _type = "Task" [ 1892.057606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.067833] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.081106] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1892.081512] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.081703] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1892.081898] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.082209] env[62405]: DEBUG nova.virt.hardware [None 
req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1892.082419] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1892.082768] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1892.082973] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1892.083203] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1892.083350] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1892.083544] env[62405]: DEBUG nova.virt.hardware [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1892.084909] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120acebf-20ea-4f85-b1f0-167806ea059a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.093951] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c27781e-ecf7-44e9-b12c-055bd3e3c98b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.110808] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1892.116549] env[62405]: DEBUG oslo.service.loopingcall [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.117300] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1892.117578] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b901945f-b5e6-4baf-98ca-32e129f21a7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.132656] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947752, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.138719] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1892.138719] env[62405]: value = "task-1947757" [ 1892.138719] env[62405]: _type = "Task" [ 1892.138719] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.148118] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947757, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.339315] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.373591] env[62405]: DEBUG oslo_vmware.api [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Task: {'id': task-1947755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243302} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.374730] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1892.375104] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1892.375471] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1892.375791] env[62405]: INFO nova.compute.manager [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1892.376203] env[62405]: DEBUG oslo.service.loopingcall [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.377131] env[62405]: DEBUG nova.compute.manager [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1892.377269] env[62405]: DEBUG nova.network.neutron [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1892.524524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.524896] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Instance network_info: |[{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1892.525283] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:e8:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2026016a-87b1-42ae-a04f-d95c5fb37377', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1892.533168] env[62405]: DEBUG oslo.service.loopingcall [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.533394] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1892.533628] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4caa7874-a57a-49d8-8ff7-09997c6c8b8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.553840] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1892.553840] env[62405]: value = "task-1947758" [ 1892.553840] env[62405]: _type = "Task" [ 1892.553840] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.564157] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947758, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.571967] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947756, 'name': PowerOffVM_Task, 'duration_secs': 0.184517} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.572257] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1892.572531] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1892.572737] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e3a807d-1e4c-4d76-bd34-ced075ffe19b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.580762] env[62405]: ERROR nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [req-fcf50e78-5c2f-4e75-a0e2-33554220740d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fcf50e78-5c2f-4e75-a0e2-33554220740d"}]} [ 1892.598419] env[62405]: DEBUG nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1892.609542] env[62405]: DEBUG oslo_vmware.api [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947752, 'name': RemoveSnapshot_Task, 'duration_secs': 1.075241} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.609542] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1892.617406] env[62405]: DEBUG nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1892.617668] env[62405]: DEBUG nova.compute.provider_tree [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1892.645049] env[62405]: DEBUG nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1892.647966] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1892.657071] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947757, 'name': CreateVM_Task, 'duration_secs': 0.319165} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.657342] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1892.658033] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.658248] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.658588] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1892.658859] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6de17a4-7b40-4a2d-801b-651e5872915d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.664912] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1892.664912] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc78f9-4ea4-c46e-eb6e-849f20e8a983" [ 1892.664912] env[62405]: _type = "Task" [ 1892.664912] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.673994] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc78f9-4ea4-c46e-eb6e-849f20e8a983, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.674899] env[62405]: DEBUG nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1892.685056] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1892.685298] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.685456] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1892.685634] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.685778] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1892.685919] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1892.686778] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 
tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1892.686778] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1892.686778] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1892.686778] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1892.686981] env[62405]: DEBUG nova.virt.hardware [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1892.687770] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be5e23e-b034-41ae-8d79-27f3b2b9b098 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.696286] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6aaf18b-c5cb-47c2-9cb8-848891112ba6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.814436] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1892.814656] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1892.814840] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleting the datastore file [datastore1] 2c623c00-92f2-4cc4-8503-963c3308d708 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1892.815121] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68831f61-d048-4e11-a7bb-dd32e57074b0 {{(pid=62405) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.821782] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1892.821782] env[62405]: value = "task-1947760" [ 1892.821782] env[62405]: _type = "Task" [ 1892.821782] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.833835] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.858589] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.858889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.859137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "08d7be6c-0557-46af-ae8d-e1c68e878cae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.859335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.859507] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.862164] env[62405]: INFO nova.compute.manager [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Terminating instance [ 1892.991287] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb76d749-6953-4c09-83eb-7c653b548553 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.999036] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585b8f2b-29cf-47bf-b13e-e9be2e9dea93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.031073] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Successfully updated port: 339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.036326] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b458cde2-f78e-473e-aff0-81e63700fbcb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.042319] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e02860-9905-4183-8450-1973f6e23363 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.055216] env[62405]: DEBUG nova.compute.provider_tree [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.068836] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947758, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.114103] env[62405]: WARNING nova.compute.manager [None req-2a99443a-7a55-4c6b-8c69-8838d0e318b7 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Image not found during snapshot: nova.exception.ImageNotFound: Image b8f122c6-183d-49dc-b088-8f71ee372deb could not be found. [ 1893.175393] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc78f9-4ea4-c46e-eb6e-849f20e8a983, 'name': SearchDatastore_Task, 'duration_secs': 0.0177} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.175915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.175969] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1893.176308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.176385] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.176544] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1893.176820] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a3f8c68-25d5-4598-b56e-b424d5db6129 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.186430] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1893.186623] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1893.187389] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0225b79e-d65f-4cf9-839c-8f3e7c3c87c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.196934] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1893.196934] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52128269-8ca5-8dec-86e7-7fb4791cd47a" [ 1893.196934] env[62405]: _type = "Task" [ 1893.196934] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.203461] env[62405]: DEBUG nova.compute.manager [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Received event network-changed-2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1893.203664] env[62405]: DEBUG nova.compute.manager [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Refreshing instance network info cache due to event network-changed-2026016a-87b1-42ae-a04f-d95c5fb37377. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1893.203879] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.204230] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.204465] env[62405]: DEBUG nova.network.neutron [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Refreshing network info cache for port 2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1893.208711] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52128269-8ca5-8dec-86e7-7fb4791cd47a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.333778] env[62405]: DEBUG oslo_vmware.api [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132789} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.334116] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1893.334346] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1893.334746] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1893.334979] env[62405]: INFO nova.compute.manager [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1893.335278] env[62405]: DEBUG oslo.service.loopingcall [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.335502] env[62405]: DEBUG nova.compute.manager [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1893.335698] env[62405]: DEBUG nova.network.neutron [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1893.365799] env[62405]: DEBUG nova.compute.manager [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1893.369018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1893.369018] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613b3c6f-e995-4259-99e2-2ce272394fa0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.378875] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1893.379368] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7ed9ba1-a08e-4259-ae1a-fc62d79d4921 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.490343] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1893.490343] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1893.490343] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] 08d7be6c-0557-46af-ae8d-e1c68e878cae {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1893.490343] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-952308fc-684b-4c66-957f-6858e888b342 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.496202] env[62405]: DEBUG oslo_vmware.api [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1893.496202] env[62405]: value = "task-1947762" [ 1893.496202] env[62405]: _type = "Task" [ 1893.496202] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.505891] env[62405]: DEBUG oslo_vmware.api [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947762, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.539578] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.539578] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.539578] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1893.549425] env[62405]: DEBUG nova.compute.manager [req-aa809382-4df9-4f30-8e36-9453bab4ac19 req-8de3e340-e13c-4d47-8166-ea172148300c service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Received event network-vif-deleted-181e34ed-64d1-4e72-8ea6-a8e10f831868 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1893.549643] env[62405]: INFO nova.compute.manager [req-aa809382-4df9-4f30-8e36-9453bab4ac19 req-8de3e340-e13c-4d47-8166-ea172148300c service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Neutron deleted interface 181e34ed-64d1-4e72-8ea6-a8e10f831868; detaching it from the instance and deleting it from the info cache [ 1893.549879] env[62405]: DEBUG nova.network.neutron [req-aa809382-4df9-4f30-8e36-9453bab4ac19 req-8de3e340-e13c-4d47-8166-ea172148300c service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.564392] env[62405]: DEBUG nova.scheduler.client.report [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1893.575946] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947758, 'name': CreateVM_Task, 'duration_secs': 0.760553} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.576148] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1893.576900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.577096] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.577410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1893.577673] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e24153-de91-456c-87b5-0ec0700c72c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.582650] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1893.582650] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c3894c-3ab0-a02a-2581-fc8257eb9b8e" [ 1893.582650] env[62405]: _type = "Task" [ 1893.582650] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.594357] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c3894c-3ab0-a02a-2581-fc8257eb9b8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.712718] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52128269-8ca5-8dec-86e7-7fb4791cd47a, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.714170] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f9c07c-8842-49b9-8f1b-4b02eefb5896 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.721026] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1893.721026] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b42501-316d-a72c-5075-9bde92a9cdf5" [ 1893.721026] env[62405]: _type = "Task" [ 1893.721026] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.729789] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b42501-316d-a72c-5075-9bde92a9cdf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.901606] env[62405]: DEBUG nova.network.neutron [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.010226] env[62405]: DEBUG oslo_vmware.api [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152297} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.010611] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1894.010695] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1894.011300] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1894.011300] env[62405]: INFO nova.compute.manager [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Took 0.64 seconds to destroy the instance on the hypervisor. 
[ 1894.011400] env[62405]: DEBUG oslo.service.loopingcall [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.011526] env[62405]: DEBUG nova.compute.manager [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1894.011669] env[62405]: DEBUG nova.network.neutron [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1894.056033] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e505cb31-f3ac-444d-ad62-c753008508ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.065388] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330c41b7-4c1e-421b-848a-585af40afd09 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.079547] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.461s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.080160] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1894.085447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.964s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.085447] env[62405]: INFO nova.compute.claims [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1894.090905] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1894.112490] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.113327] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.113417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.113713] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.113950] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.118168] env[62405]: INFO nova.compute.manager [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Terminating instance [ 1894.118948] env[62405]: DEBUG nova.compute.manager [req-aa809382-4df9-4f30-8e36-9453bab4ac19 req-8de3e340-e13c-4d47-8166-ea172148300c service nova] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Detach interface failed, port_id=181e34ed-64d1-4e72-8ea6-a8e10f831868, reason: Instance 4c8c0d2f-d8d3-4422-8a5c-8999636b22be could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1894.120034] env[62405]: DEBUG nova.network.neutron [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updated VIF entry in instance network info cache for port 2026016a-87b1-42ae-a04f-d95c5fb37377. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1894.120363] env[62405]: DEBUG nova.network.neutron [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.132126] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c3894c-3ab0-a02a-2581-fc8257eb9b8e, 'name': SearchDatastore_Task, 'duration_secs': 0.009013} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.134629] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.134629] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1894.134747] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.231846] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b42501-316d-a72c-5075-9bde92a9cdf5, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.232138] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.232402] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1894.232752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.232855] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1894.233076] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-746df06c-00ba-4280-bb90-be1a1cad40ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.234832] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb1ad6a1-9c18-4386-99b8-d9be1d728994 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.241890] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1894.241890] env[62405]: value = "task-1947763" [ 1894.241890] env[62405]: _type = "Task" [ 1894.241890] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.245529] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1894.245732] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1894.246730] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63c99beb-18f9-4478-896d-05fde3775e60 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.252205] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947763, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.255236] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1894.255236] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52864ee4-1895-4e91-a645-9d56e3ee9712" [ 1894.255236] env[62405]: _type = "Task" [ 1894.255236] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.262914] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52864ee4-1895-4e91-a645-9d56e3ee9712, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.295155] env[62405]: DEBUG nova.network.neutron [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.332928] env[62405]: DEBUG nova.network.neutron [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Updating instance_info_cache with network_info: [{"id": "339b22b0-3451-4284-a022-8823b059c08d", "address": "fa:16:3e:c8:a8:fd", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap339b22b0-34", "ovs_interfaceid": "339b22b0-3451-4284-a022-8823b059c08d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.406393] env[62405]: INFO nova.compute.manager [-] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Took 2.03 seconds to deallocate network for instance. [ 1894.595037] env[62405]: DEBUG nova.compute.utils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1894.595037] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1894.595037] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1894.623525] env[62405]: DEBUG oslo_concurrency.lockutils [req-3d75c471-a2a3-4b5b-bad2-69abcc5fc284 req-612dccb1-190e-4f58-9a3a-b920b7eb0b19 service nova] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.630228] env[62405]: DEBUG nova.compute.manager [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1894.630228] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1894.630228] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a2d838-059f-4cca-87e9-cbf8befb1dfe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.636980] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1894.637512] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e210b896-74b8-404f-a58f-91debbaa2a83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.645142] env[62405]: DEBUG nova.policy [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ccaab252cb403bb54364c35d6dcbd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d2ff9a8cb1840889a4a2a87c663f59e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1894.646502] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1894.646502] env[62405]: value = "task-1947764" [ 1894.646502] env[62405]: _type = "Task" [ 1894.646502] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.655723] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947764, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.753628] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947763, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.764593] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52864ee4-1895-4e91-a645-9d56e3ee9712, 'name': SearchDatastore_Task, 'duration_secs': 0.00797} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.765371] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95d65d56-fa8b-4c92-9c5e-919008d397fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.770591] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1894.770591] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523c09c9-e893-9e70-77dd-cd4d6a8c3bcc" [ 1894.770591] env[62405]: _type = "Task" [ 1894.770591] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.778082] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523c09c9-e893-9e70-77dd-cd4d6a8c3bcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.799708] env[62405]: INFO nova.compute.manager [-] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Took 1.46 seconds to deallocate network for instance. 
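[editor's note] The entries above and below repeat one pattern: a vSphere task (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOffVM_Task, ...) is invoked, wait_for_task logs "Waiting for the task ... to complete", _poll_task reports "progress is N%" until the task finishes, and the completed result carries a duration_secs value. The stand-alone Python sketch below illustrates that poll-until-done loop only; it is not the oslo.vmware implementation, and the get_task_info callable, its state/progress/error attributes, and the poll interval are hypothetical stand-ins for the real vSphere task objects.

    import time
    import logging

    LOG = logging.getLogger(__name__)

    class TaskFailed(Exception):
        """Raised when the remote task finishes in an error state."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a remote task until it succeeds, mirroring the log's
        'Waiting for the task ... to complete' / 'progress is N%' lines.

        get_task_info is a hypothetical callable returning an object with
        .state ('running' | 'success' | 'error'), .progress, and .error.
        """
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == 'success':
                # Matches the "completed successfully" entries, which also
                # report the elapsed time as duration_secs.
                LOG.debug("Task completed successfully in %.3fs",
                          time.monotonic() - start)
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # Matches the periodic "progress is N%." polling entries.
            LOG.debug("Task progress is %s%%.", info.progress)
            if time.monotonic() - start > timeout:
                raise TaskFailed("timed out waiting for task")
            time.sleep(poll_interval)

In the log itself this loop lives in oslo_vmware.api (wait_for_task at api.py:397, _poll_task at api.py:434), and the surrounding oslo_concurrency.lockutils acquire/release entries serialize concurrent access to the shared devstack-image-cache_base VMDK while those tasks run. [end editor's note]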
[ 1894.837813] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.837813] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Instance network_info: |[{"id": "339b22b0-3451-4284-a022-8823b059c08d", "address": "fa:16:3e:c8:a8:fd", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap339b22b0-34", "ovs_interfaceid": "339b22b0-3451-4284-a022-8823b059c08d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1894.838129] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:a8:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf63c3c8-d774-4b81-9b12-848612a96076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '339b22b0-3451-4284-a022-8823b059c08d', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.845491] env[62405]: DEBUG oslo.service.loopingcall [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.845735] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1894.845964] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65ae880d-a50f-4210-9b18-cbfeed73b0b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.866048] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.866048] env[62405]: value = "task-1947765" [ 1894.866048] env[62405]: _type = "Task" [ 1894.866048] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.873354] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947765, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.915136] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.928877] env[62405]: DEBUG nova.network.neutron [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.159427] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947764, 'name': PowerOffVM_Task, 'duration_secs': 0.246915} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.159914] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1895.160122] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1895.160380] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-091a6d37-37f0-4cf7-ae98-bddfe962e9ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.251916] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947763, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514327} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.254314] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1895.254532] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1895.254945] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df3756d9-833a-46f9-a9ed-8d50f67a57f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.260898] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1895.260898] env[62405]: value = "task-1947767" [ 1895.260898] env[62405]: _type = "Task" [ 1895.260898] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.270678] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.280220] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523c09c9-e893-9e70-77dd-cd4d6a8c3bcc, 'name': SearchDatastore_Task, 'duration_secs': 0.017705} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.280484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.280735] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1895.280979] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74af2384-2e61-43f5-8e34-f358e04451f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.288978] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1895.288978] env[62405]: value = "task-1947768" [ 1895.288978] env[62405]: _type = "Task" [ 1895.288978] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.296178] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947768, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.355053] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Received event network-vif-plugged-339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1895.355319] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Acquiring lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.355548] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.355752] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.355912] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] No waiting events found dispatching network-vif-plugged-339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1895.356114] env[62405]: WARNING nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Received unexpected event network-vif-plugged-339b22b0-3451-4284-a022-8823b059c08d for instance with vm_state building and task_state spawning. [ 1895.356285] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Received event network-changed-339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1895.356438] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Refreshing instance network info cache due to event network-changed-339b22b0-3451-4284-a022-8823b059c08d. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1895.356640] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Acquiring lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.356946] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Acquired lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.356946] env[62405]: DEBUG nova.network.neutron [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Refreshing network info cache for port 339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1895.371836] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16692d83-b89e-4fc5-9271-f11fd47e4f06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.379772] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947765, 'name': CreateVM_Task, 'duration_secs': 0.373647} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.381493] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1895.382225] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.382386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.382702] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1895.383672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06d9358-2f4d-4844-8da3-35772c9fb28c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.386608] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-ce023cc1-fdcf-45fa-927d-e631c7b838e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.392324] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1895.392324] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521f432b-9127-82ba-8f97-8f365e63f62b" [ 1895.392324] env[62405]: _type = "Task" [ 1895.392324] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.422618] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adde9bf4-d0db-4a79-9c16-95b42f55596a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.424819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1895.425060] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1895.425246] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleting the datastore file [datastore1] 59fe34ab-c01d-4083-8bcd-ad6b4133a66f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1895.425760] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a5cf194-56b4-47bd-8ef4-0a2451ac4fd6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.434489] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521f432b-9127-82ba-8f97-8f365e63f62b, 'name': SearchDatastore_Task, 'duration_secs': 0.015946} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.437446] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.437756] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.437998] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.438172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.438355] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.438683] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for the task: (returnval){ [ 1895.438683] env[62405]: value = "task-1947769" [ 1895.438683] env[62405]: _type = "Task" [ 1895.438683] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.438935] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-add5c769-d3f5-4b3e-9857-a2d45517f1af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.441624] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a88210e-f7e7-4837-b0fe-99d610eae6f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.453435] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947769, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.461981] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.462232] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.463140] env[62405]: DEBUG nova.compute.provider_tree [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1895.464715] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d68dbb06-f606-4fbe-91e0-48b2824b2db9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.470393] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1895.470393] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f57a1a-eb07-271d-a4e1-5dff9056656f" [ 1895.470393] env[62405]: _type = "Task" [ 1895.470393] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.478049] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f57a1a-eb07-271d-a4e1-5dff9056656f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.770948] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065668} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.771264] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1895.771994] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe551ae-3aac-4b62-bc3b-aac0969e3400 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.791323] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1895.791642] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82a24c59-5f87-46d1-9ea9-224b4a068b97 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.815182] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947768, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.816436] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1895.816436] env[62405]: value = "task-1947770" [ 1895.816436] env[62405]: _type = "Task" [ 1895.816436] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.824067] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947770, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.952492] env[62405]: DEBUG oslo_vmware.api [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Task: {'id': task-1947769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.482828} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.952795] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1895.952991] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1895.953184] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1895.953378] env[62405]: INFO nova.compute.manager [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1895.953635] env[62405]: DEBUG oslo.service.loopingcall [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1895.953843] env[62405]: DEBUG nova.compute.manager [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1895.953950] env[62405]: DEBUG nova.network.neutron [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1895.967713] env[62405]: DEBUG nova.scheduler.client.report [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1895.982768] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f57a1a-eb07-271d-a4e1-5dff9056656f, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.983555] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f04abb67-5ef7-41f6-8d2c-6a18261c304e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.988709] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1895.988709] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fdd0b8-d2de-1c5e-b4b4-2f0b82ac7626" [ 1895.988709] env[62405]: _type = "Task" [ 1895.988709] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.998481] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fdd0b8-d2de-1c5e-b4b4-2f0b82ac7626, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.074781] env[62405]: DEBUG nova.network.neutron [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Updated VIF entry in instance network info cache for port 339b22b0-3451-4284-a022-8823b059c08d. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1896.075174] env[62405]: DEBUG nova.network.neutron [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Updating instance_info_cache with network_info: [{"id": "339b22b0-3451-4284-a022-8823b059c08d", "address": "fa:16:3e:c8:a8:fd", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap339b22b0-34", "ovs_interfaceid": "339b22b0-3451-4284-a022-8823b059c08d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947768, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.553586} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.730789] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1897.730789] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-410677fc-5612-46cc-87f7-2b7c0d568763 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1897.730789] env[62405]: value = "task-1947771" [ 1897.730789] env[62405]: _type = "Task" [ 1897.730789] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947771, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947770, 'name': ReconfigVM_Task, 'duration_secs': 0.248747} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.730789] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 8f133517-cff2-40c7-8333-a9116163313a/8f133517-cff2-40c7-8333-a9116163313a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8aed6191-4aa1-479b-b0c4-48eec7b1e541 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1897.730789] env[62405]: value = "task-1947772" [ 1897.730789] env[62405]: _type = "Task" [ 1897.730789] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947772, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.730789] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fdd0b8-d2de-1c5e-b4b4-2f0b82ac7626, 'name': SearchDatastore_Task, 'duration_secs': 0.015899} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.730789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.730789] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 06dbb3e0-876e-4290-81f5-6f95f9d5cb37/06dbb3e0-876e-4290-81f5-6f95f9d5cb37.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11d05b8f-cfc7-4714-b830-4ec3b7920d3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1897.732816] env[62405]: value = "task-1947773" [ 1897.732816] env[62405]: _type = "Task" [ 1897.732816] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947773, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.732816] env[62405]: DEBUG oslo_concurrency.lockutils [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] Releasing lock "refresh_cache-06dbb3e0-876e-4290-81f5-6f95f9d5cb37" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.732816] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Received event network-vif-deleted-3cb3354b-4416-4325-9602-8abc5afe9861 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1897.732816] env[62405]: DEBUG nova.compute.manager [req-536089fe-6d90-41d0-ba2c-77c2b4d5386a req-5bc6a920-a5f9-4b4d-a97a-8b555198df5f service nova] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Received event network-vif-deleted-906bdab0-cfcb-43b1-8d01-63587d844b97 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947771, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072191} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.732816] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddccea3c-e4fc-449f-81f2-187ab277de4b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.732816] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8b81864-aa7b-4ad0-b3f3-69bb502085c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947772, 'name': Rename_Task, 'duration_secs': 0.136702} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.732816] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1897.732816] env[62405]: value = "task-1947774" [ 1897.732816] env[62405]: _type = "Task" [ 1897.732816] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b130ebac-0275-4941-8a00-2383e034b9f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.732816] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Waiting for the task: (returnval){ [ 1897.732816] env[62405]: value = "task-1947775" [ 1897.732816] env[62405]: _type = "Task" [ 1897.732816] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947774, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947775, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466491} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.734119] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 06dbb3e0-876e-4290-81f5-6f95f9d5cb37/06dbb3e0-876e-4290-81f5-6f95f9d5cb37.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1897.734119] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-502d16ef-ed52-46cb-a186-f44cd9cd5bdd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1897.734119] env[62405]: value = "task-1947776" [ 1897.734119] env[62405]: _type = "Task" [ 1897.734119] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947776, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947774, 'name': ReconfigVM_Task, 'duration_secs': 0.299659} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.734119] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44099004-a05f-479b-8339-14f169b76f81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1897.734119] env[62405]: value = "task-1947777" [ 1897.734119] env[62405]: _type = "Task" [ 1897.734119] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947775, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947777, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.734119] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059916} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.735216] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1897.735216] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a37f87-89b6-4cc7-88eb-f49e5ccc9f45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.735216] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 06dbb3e0-876e-4290-81f5-6f95f9d5cb37/06dbb3e0-876e-4290-81f5-6f95f9d5cb37.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1897.735216] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f31bc6f8-ad6b-499f-acde-1b8232ed8bcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.735216] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1897.735216] env[62405]: value = "task-1947778" [ 1897.735216] env[62405]: _type = "Task" [ 1897.735216] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.735216] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947778, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.896669] env[62405]: DEBUG oslo_vmware.api [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Task: {'id': task-1947775, 'name': PowerOnVM_Task, 'duration_secs': 0.629341} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.900707] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1897.900935] env[62405]: DEBUG nova.compute.manager [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1897.902039] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea37f35d-5091-4f71-82b3-4bf8a435442d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.911034] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947777, 'name': Rename_Task, 'duration_secs': 0.138145} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.911034] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1897.913015] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80622f67-cb43-40ac-b349-d75202b624c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.920992] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 1897.920992] env[62405]: value = "task-1947779" [ 1897.920992] env[62405]: _type = "Task" [ 1897.920992] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.933169] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947779, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.938412] env[62405]: INFO nova.compute.manager [-] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Took 3.93 seconds to deallocate network for instance. 
[ 1897.977053] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.894s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.979644] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1897.982755] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.795s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.984930] env[62405]: INFO nova.compute.claims [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1898.084031] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947778, 'name': ReconfigVM_Task, 'duration_secs': 0.255822} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.084445] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 06dbb3e0-876e-4290-81f5-6f95f9d5cb37/06dbb3e0-876e-4290-81f5-6f95f9d5cb37.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1898.085123] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b3050d6-afaf-4211-8b16-bfd9a8b536e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.091300] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1898.091300] env[62405]: value = "task-1947780" [ 1898.091300] env[62405]: _type = "Task" [ 1898.091300] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.099641] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947780, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.102249] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1898.171388] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Successfully created port: 04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1898.283275] env[62405]: DEBUG nova.network.neutron [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.314620] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.405474] env[62405]: DEBUG nova.compute.manager [req-7aafd1af-c9ac-4e14-bfff-43bbfe9aaa68 req-6b39dac8-afb4-4af4-96b4-3e3578c23784 service nova] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Received event network-vif-deleted-55e7f388-c46a-48fe-b363-c49bbbe7f6b4 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1898.422389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.434506] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947779, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.444835] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.489033] env[62405]: DEBUG nova.compute.utils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1898.495021] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1898.495021] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1898.552599] env[62405]: DEBUG nova.policy [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ccaab252cb403bb54364c35d6dcbd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d2ff9a8cb1840889a4a2a87c663f59e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1898.601565] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947780, 'name': Rename_Task, 'duration_secs': 0.18307} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.601874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.602140] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ad4a159-e618-4968-8bae-58f851f8c4e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.610620] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1898.610620] env[62405]: value = "task-1947781" [ 1898.610620] env[62405]: _type = "Task" [ 1898.610620] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.622774] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947781, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.788569] env[62405]: INFO nova.compute.manager [-] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Took 2.83 seconds to deallocate network for instance. [ 1898.934470] env[62405]: DEBUG oslo_vmware.api [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1947779, 'name': PowerOnVM_Task, 'duration_secs': 0.531481} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.934752] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1898.935741] env[62405]: INFO nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Took 8.80 seconds to spawn the instance on the hypervisor. 
[ 1898.935967] env[62405]: DEBUG nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1898.936791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4cce05-1b19-461c-b414-44c15c2b9a68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.994120] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1899.004344] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "8f133517-cff2-40c7-8333-a9116163313a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.005344] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.006435] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "8f133517-cff2-40c7-8333-a9116163313a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.006600] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.006782] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.010431] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Successfully created port: 
546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1899.012516] env[62405]: INFO nova.compute.manager [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Terminating instance [ 1899.121046] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1899.122905] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947781, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.165425] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1899.165712] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.165886] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1899.166304] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.166680] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1899.167270] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 
tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1899.167270] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1899.167346] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1899.167485] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1899.167664] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1899.167858] env[62405]: DEBUG nova.virt.hardware [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1899.169261] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657c500c-0d26-474d-9575-9fe1519651ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.181303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151fdf14-7c16-487a-a460-a2aac260ff92 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.295744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.337757] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4d92f2-d7be-4643-b779-838cc74779a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.346833] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff9510a-d4d8-4991-93af-e44a74cede23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.381407] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c745dfb-8ca6-4e6e-aecd-df6825962985 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.390634] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cff6f39-0f59-4c46-b20a-cd9ba1babfd9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.409375] env[62405]: DEBUG nova.compute.provider_tree [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1899.456040] env[62405]: INFO nova.compute.manager [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Took 35.20 seconds to build instance. [ 1899.518289] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "refresh_cache-8f133517-cff2-40c7-8333-a9116163313a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1899.518289] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquired lock "refresh_cache-8f133517-cff2-40c7-8333-a9116163313a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1899.518289] env[62405]: DEBUG nova.network.neutron [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1899.621215] env[62405]: DEBUG oslo_vmware.api [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947781, 'name': PowerOnVM_Task, 'duration_secs': 0.960034} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.622619] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1899.622619] env[62405]: INFO nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Took 6.97 seconds to spawn the instance on the hypervisor. [ 1899.622619] env[62405]: DEBUG nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1899.622761] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0774ed10-2d25-4ba8-8941-6c594525ea69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.823801] env[62405]: DEBUG nova.compute.manager [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Received event network-vif-plugged-04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1899.824083] env[62405]: DEBUG oslo_concurrency.lockutils [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] Acquiring lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.824484] env[62405]: DEBUG oslo_concurrency.lockutils [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.824484] env[62405]: DEBUG oslo_concurrency.lockutils [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.824611] env[62405]: DEBUG nova.compute.manager [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] No waiting events found dispatching network-vif-plugged-04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1899.824855] env[62405]: WARNING nova.compute.manager [req-207f7956-314b-4a23-ba33-ba65eab294e5 req-720bdff7-0eb9-426c-8237-a5d1ee594471 service nova] [instance: 
78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Received unexpected event network-vif-plugged-04edbefd-e96c-47d6-bfd7-72fb2a759156 for instance with vm_state building and task_state spawning. [ 1899.846064] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Successfully updated port: 04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1899.955482] env[62405]: DEBUG nova.scheduler.client.report [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 137 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1899.955776] env[62405]: DEBUG nova.compute.provider_tree [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 137 to 138 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1899.955968] env[62405]: DEBUG nova.compute.provider_tree [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1899.960338] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3e70983d-78b4-4f0e-8b81-ac388634cfb5 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.730s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.007305] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1900.039254] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1900.039520] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.039684] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1900.039871] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.040161] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1900.040378] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1900.040600] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1900.040763] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1900.040931] env[62405]: DEBUG nova.virt.hardware [None 
req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1900.041167] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1900.041443] env[62405]: DEBUG nova.virt.hardware [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1900.042344] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01c67b7-91c3-4650-a675-0ba9190d458d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.046431] env[62405]: DEBUG nova.network.neutron [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.052012] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed238a0-a6f9-4c9f-a0c3-f60566d0cb31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.110092] env[62405]: DEBUG nova.network.neutron [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1900.143273] env[62405]: INFO nova.compute.manager [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Took 30.86 seconds to build instance. 
[ 1900.349014] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.349014] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.349014] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1900.461823] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.462528] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1900.465794] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.277s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.466218] env[62405]: DEBUG nova.objects.instance [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lazy-loading 'resources' on Instance uuid 171910d2-02b8-4219-ae75-5cecccea1de3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1900.613043] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Releasing lock "refresh_cache-8f133517-cff2-40c7-8333-a9116163313a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1900.613675] env[62405]: DEBUG nova.compute.manager [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1900.613926] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1900.614832] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fb7388-ac28-4762-b5ec-89339f67aedb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.625603] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1900.625962] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e71b25b-a776-4639-be36-2604279d8e16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.633327] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1900.633327] env[62405]: value = "task-1947782" [ 1900.633327] env[62405]: _type = "Task" [ 1900.633327] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.645353] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7786206-6658-453e-aa9c-fb6a7c7a118a tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.910s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.645677] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.887421] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1900.971794] env[62405]: DEBUG nova.compute.utils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1900.976841] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1900.977279] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1901.051339] env[62405]: DEBUG nova.policy [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1901.122263] env[62405]: DEBUG nova.network.neutron [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Updating instance_info_cache with network_info: [{"id": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "address": "fa:16:3e:5c:57:a9", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04edbefd-e9", "ovs_interfaceid": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.148565] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 
tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947782, 'name': PowerOffVM_Task, 'duration_secs': 0.193818} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.151349] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1901.151536] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1901.152024] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b70b194-fc0b-4515-97c9-9a4cf0b9560c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.180378] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1901.180609] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1901.180795] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Deleting the datastore file [datastore1] 8f133517-cff2-40c7-8333-a9116163313a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1901.181075] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-972c5aa6-e477-4c5b-bff3-7edf511a2144 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.187950] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for the task: (returnval){ [ 1901.187950] env[62405]: value = "task-1947784" [ 1901.187950] env[62405]: _type = "Task" [ 1901.187950] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.201748] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947784, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.272060] env[62405]: DEBUG nova.compute.manager [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Received event network-vif-plugged-546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1901.272274] env[62405]: DEBUG oslo_concurrency.lockutils [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] Acquiring lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.272478] env[62405]: DEBUG oslo_concurrency.lockutils [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.272639] env[62405]: DEBUG oslo_concurrency.lockutils [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.272814] env[62405]: DEBUG nova.compute.manager [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] No waiting events found dispatching network-vif-plugged-546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1901.273383] env[62405]: WARNING nova.compute.manager [req-3797bdaa-3db1-4206-93be-8f48c761cf79 req-a6048851-10f5-486b-b443-f48a0cc932b7 service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Received unexpected event network-vif-plugged-546c09a6-a133-400e-b556-9b225a501a58 for instance with vm_state building and task_state spawning. 
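The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-wait pattern: the driver invokes a vSphere task method through the shared API session and then polls it with wait_for_task until it reports success, which is what produces the "progress is 0%" and "completed successfully" lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an already-resolved VM managed object reference passed in as vm_ref:

    from oslo_vmware import api

    def power_off(vm_ref):
        # Placeholder endpoint and credentials; in Nova these come from nova.conf.
        session = api.VMwareAPISession(
            'vcenter.example.test',   # vCenter host (assumed)
            'user', 'secret',         # username / password (assumed)
            10,                       # api_retry_count
            0.5)                      # task_poll_interval, seconds
        # Invoke the vSphere task method against the VM's managed object
        # reference, then block until the task completes, as wait_for_task
        # does in the log above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)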
[ 1901.347229] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6b0d22-3297-4003-ba9a-15f8a9483d35 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.355058] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f38d6a-16dc-474e-b934-d48979805e0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.360536] env[62405]: DEBUG nova.compute.manager [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Received event network-changed-2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1901.360536] env[62405]: DEBUG nova.compute.manager [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Refreshing instance network info cache due to event network-changed-2026016a-87b1-42ae-a04f-d95c5fb37377. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1901.360735] env[62405]: DEBUG oslo_concurrency.lockutils [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.360775] env[62405]: DEBUG oslo_concurrency.lockutils [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.362516] env[62405]: DEBUG nova.network.neutron [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Refreshing network info cache for port 2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.392760] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Successfully created port: ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1901.395772] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d78e61-4a96-4f02-8c34-8220aa681256 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.404994] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64ffa13-74c9-4ec2-abfa-8046770eabf8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.419495] env[62405]: DEBUG nova.compute.provider_tree [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1901.478716] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1901.601392] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Successfully updated port: 546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1901.624480] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.624797] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance network_info: |[{"id": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "address": "fa:16:3e:5c:57:a9", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04edbefd-e9", "ovs_interfaceid": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1901.625920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:57:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '04edbefd-e96c-47d6-bfd7-72fb2a759156', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.634028] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating folder: Project (1d2ff9a8cb1840889a4a2a87c663f59e). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1901.634285] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8efd6aad-84d8-4154-adcc-db961a23f89f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.645597] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created folder: Project (1d2ff9a8cb1840889a4a2a87c663f59e) in parent group-v401284. [ 1901.645875] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating folder: Instances. Parent ref: group-v401527. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1901.646719] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-449a90cc-10ca-405b-b925-a2a4c09c71e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.657935] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created folder: Instances in parent group-v401527. [ 1901.657935] env[62405]: DEBUG oslo.service.loopingcall [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.658161] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1901.658313] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93f9b8f0-1341-4e67-900f-95bc6f1dc19d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.678945] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.678945] env[62405]: value = "task-1947787" [ 1901.678945] env[62405]: _type = "Task" [ 1901.678945] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.690017] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947787, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.698572] env[62405]: DEBUG oslo_vmware.api [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Task: {'id': task-1947784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096339} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.699172] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1901.699172] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1901.699445] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1901.699445] env[62405]: INFO nova.compute.manager [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1901.699700] env[62405]: DEBUG oslo.service.loopingcall [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.699800] env[62405]: DEBUG nova.compute.manager [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1901.699936] env[62405]: DEBUG nova.network.neutron [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1901.724347] env[62405]: DEBUG nova.network.neutron [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1901.954687] env[62405]: DEBUG nova.compute.manager [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Received event network-changed-04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1901.954938] env[62405]: DEBUG nova.compute.manager [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Refreshing instance network info cache due to event network-changed-04edbefd-e96c-47d6-bfd7-72fb2a759156. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1901.955195] env[62405]: DEBUG oslo_concurrency.lockutils [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] Acquiring lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.955436] env[62405]: DEBUG oslo_concurrency.lockutils [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] Acquired lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.955697] env[62405]: DEBUG nova.network.neutron [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Refreshing network info cache for port 04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.958215] env[62405]: DEBUG nova.scheduler.client.report [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 138 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1901.958525] env[62405]: DEBUG nova.compute.provider_tree [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 138 to 139 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1901.958770] env[62405]: DEBUG nova.compute.provider_tree [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1902.104089] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.104305] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.104481] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.190522] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947787, 'name': CreateVM_Task, 'duration_secs': 0.400527} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.190705] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1902.191409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.191577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.191900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1902.192182] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d953bab1-4d5c-419d-abfc-359c6b9d590b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.196687] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 
1902.196687] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a0b117-d616-664e-2d2a-60f5e18129f5" [ 1902.196687] env[62405]: _type = "Task" [ 1902.196687] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.204522] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a0b117-d616-664e-2d2a-60f5e18129f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.227123] env[62405]: DEBUG nova.network.neutron [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.277110] env[62405]: DEBUG nova.network.neutron [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updated VIF entry in instance network info cache for port 2026016a-87b1-42ae-a04f-d95c5fb37377. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1902.277544] env[62405]: DEBUG nova.network.neutron [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.466465] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.468900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.688s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.491564] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1902.494390] env[62405]: INFO nova.scheduler.client.report [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Deleted allocations for instance 171910d2-02b8-4219-ae75-5cecccea1de3 [ 1902.521903] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1902.522163] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1902.522327] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1902.522607] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1902.522672] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1902.522797] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1902.523020] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c 
tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1902.523187] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1902.523356] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1902.523781] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1902.523781] env[62405]: DEBUG nova.virt.hardware [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1902.524555] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bc4238-95f5-4c8b-93fd-859afc51e209 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.533542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec2d6af-9058-424f-9f34-7bfbc81d0d28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.646928] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1902.714486] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a0b117-d616-664e-2d2a-60f5e18129f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009174} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.714486] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.714810] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.714974] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.715167] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.715350] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1902.715861] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bd5b567-9227-44f0-aa33-01c7c96f6a27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.727054] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1902.727205] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1902.728059] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76c886a6-6508-4f83-a3dc-701b8e789f16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.734056] env[62405]: INFO nova.compute.manager [-] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Took 1.03 seconds to deallocate network for instance. 
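The "Acquiring lock" / "acquired" / "Releasing lock" lines in this stretch come from oslo.concurrency's lockutils, which Nova uses both as a decorator and as a context manager; the "[datastore1] devstack-image-cache_base/..." lock serializes access to the cached image while it is searched for and copied. A minimal sketch of both forms (the lock names here are illustrative only, not copied from Nova):

    from oslo_concurrency import lockutils

    # Decorator form: one thread at a time per lock name, matching the
    # "acquired ... :: waited 0.000s" / "released ... :: held N.NNNs" lines.
    @lockutils.synchronized('refresh_cache-example-instance-uuid')
    def refresh_network_cache():
        pass  # critical section

    # Context-manager form, as used around the datastore image cache:
    def use_cached_image():
        with lockutils.lock('[datastore1] devstack-image-cache_base/example-image-id'):
            pass  # search for / copy the cached VMDK while holding the lock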
[ 1902.740496] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1902.740496] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c7f1-8302-0f80-e136-04b2c7c9419b" [ 1902.740496] env[62405]: _type = "Task" [ 1902.740496] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.748911] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c7f1-8302-0f80-e136-04b2c7c9419b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.781028] env[62405]: DEBUG oslo_concurrency.lockutils [req-fda1c77b-1053-4edb-817e-78178e33666f req-a6149f2d-bbce-4312-a3d1-595977d714e6 service nova] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.813142] env[62405]: DEBUG nova.network.neutron [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Updated VIF entry in instance network info cache for port 04edbefd-e96c-47d6-bfd7-72fb2a759156. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1902.813142] env[62405]: DEBUG nova.network.neutron [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Updating instance_info_cache with network_info: [{"id": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "address": "fa:16:3e:5c:57:a9", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04edbefd-e9", "ovs_interfaceid": "04edbefd-e96c-47d6-bfd7-72fb2a759156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.863815] env[62405]: DEBUG nova.network.neutron [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Updating instance_info_cache with network_info: [{"id": "546c09a6-a133-400e-b556-9b225a501a58", "address": 
"fa:16:3e:7e:ca:43", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap546c09a6-a1", "ovs_interfaceid": "546c09a6-a133-400e-b556-9b225a501a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.876397] env[62405]: DEBUG nova.compute.manager [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1902.877549] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f319b3-6607-4b10-bb62-ce7c81221885 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.001127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f2bdc459-aecd-4894-bdb2-7cc11ef17746 tempest-ServersTestMultiNic-1934829859 tempest-ServersTestMultiNic-1934829859-project-member] Lock "171910d2-02b8-4219-ae75-5cecccea1de3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.078s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.087799] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Successfully updated port: ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1903.242634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.253096] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5266c7f1-8302-0f80-e136-04b2c7c9419b, 'name': SearchDatastore_Task, 'duration_secs': 0.011088} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.254147] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4343eaa6-f3eb-47e5-b636-5ae06eb82721 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.260105] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1903.260105] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528b08d2-3445-be9f-3700-22692a43e894" [ 1903.260105] env[62405]: _type = "Task" [ 1903.260105] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.268487] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528b08d2-3445-be9f-3700-22692a43e894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.315350] env[62405]: DEBUG oslo_concurrency.lockutils [req-0dcca4da-295d-4fde-afab-ca0dd9e5049d req-ec834e62-f9fd-4b4f-8600-8837f9d2be0f service nova] Releasing lock "refresh_cache-78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.367466] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.369045] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Instance network_info: |[{"id": "546c09a6-a133-400e-b556-9b225a501a58", "address": "fa:16:3e:7e:ca:43", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap546c09a6-a1", "ovs_interfaceid": "546c09a6-a133-400e-b556-9b225a501a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1903.369563] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:ca:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '546c09a6-a133-400e-b556-9b225a501a58', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.380658] env[62405]: DEBUG oslo.service.loopingcall [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.380658] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.380658] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9178f69-e739-47b9-a4e4-a48601930cc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.396372] env[62405]: INFO nova.compute.manager [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] instance snapshotting [ 1903.401243] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0d2c0f-b81e-4d1b-8b37-06504273d283 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.403568] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.403568] env[62405]: value = "task-1947788" [ 1903.403568] env[62405]: _type = "Task" [ 1903.403568] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.421569] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a3ef6a-7df9-41ec-beaa-c6d88d4557c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.429029] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947788, 'name': CreateVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.481623] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating resource usage from migration e28e937c-0b2d-4bf2-9f78-c26841fab210 [ 1903.503532] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.503710] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4d59d9fd-23df-4933-97ed-32602e51e9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.503902] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 2c623c00-92f2-4cc4-8503-963c3308d708 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1903.504067] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4c8c0d2f-d8d3-4422-8a5c-8999636b22be is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1903.504238] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance d186b2f4-3fd1-44be-b8a4-080972aff3a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.504382] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 9d97bf1d-6830-48b1-831b-bf2b52188f32 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1903.504515] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f16e3d13-6db6-4f61-b0e4-661856a9166b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.504650] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 86378df0-a658-427d-aca5-de25f84eb28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.504785] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8f133517-cff2-40c7-8333-a9116163313a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.504933] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 59fe34ab-c01d-4083-8bcd-ad6b4133a66f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1903.505121] env[62405]: WARNING nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 08d7be6c-0557-46af-ae8d-e1c68e878cae is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1903.505270] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance c39d9059-8da4-4c8d-99ab-d66b8445e7da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.505410] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 06dbb3e0-876e-4290-81f5-6f95f9d5cb37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.505541] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.505659] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 41e5385d-f0c7-4431-8424-e60dbeebaf8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.505772] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3b9a6a82-a426-4802-9640-5b39e5e0ff49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.505884] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration e28e937c-0b2d-4bf2-9f78-c26841fab210 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1903.505986] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 81aebf11-5d80-4a86-b232-3ecc5f3892c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.521884] env[62405]: DEBUG nova.compute.manager [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Received event network-changed-546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1903.521997] env[62405]: DEBUG nova.compute.manager [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Refreshing instance network info cache due to event network-changed-546c09a6-a133-400e-b556-9b225a501a58. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1903.522211] env[62405]: DEBUG oslo_concurrency.lockutils [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] Acquiring lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.522360] env[62405]: DEBUG oslo_concurrency.lockutils [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] Acquired lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.522520] env[62405]: DEBUG nova.network.neutron [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Refreshing network info cache for port 546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1903.590990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.590990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.590990] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1903.772767] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 
tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528b08d2-3445-be9f-3700-22692a43e894, 'name': SearchDatastore_Task, 'duration_secs': 0.038221} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.773091] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.773356] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1903.773616] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fc8f6f4-cac7-4bca-92b2-550de7c6e616 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.781621] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1903.781621] env[62405]: value = "task-1947789" [ 1903.781621] env[62405]: _type = "Task" [ 1903.781621] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.790522] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.913802] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947788, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.935158] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1903.935495] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a85cec9-1275-4976-85e5-8dda5c61ac9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.945034] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1903.945034] env[62405]: value = "task-1947790" [ 1903.945034] env[62405]: _type = "Task" [ 1903.945034] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.955163] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947790, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.007402] env[62405]: DEBUG nova.compute.manager [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Received event network-vif-plugged-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1904.008343] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Acquiring lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.008584] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.008622] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.008859] env[62405]: DEBUG nova.compute.manager [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] No waiting events found dispatching network-vif-plugged-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1904.008986] env[62405]: WARNING nova.compute.manager [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Received unexpected event network-vif-plugged-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d for instance with vm_state building and task_state spawning. [ 1904.009277] env[62405]: DEBUG nova.compute.manager [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Received event network-changed-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1904.009314] env[62405]: DEBUG nova.compute.manager [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Refreshing instance network info cache due to event network-changed-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1904.009505] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Acquiring lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.011083] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1904.126311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.126660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.126801] env[62405]: INFO nova.compute.manager [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Shelving [ 1904.157839] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1904.294598] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947789, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.415120] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947788, 'name': CreateVM_Task, 'duration_secs': 0.622424} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.415325] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1904.416077] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.416299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.416615] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1904.416893] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8027f6b-44eb-48d5-b839-88caaaea4948 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.423421] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1904.423421] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cefc27-c210-4e23-eeed-9bf810ee2a34" [ 1904.423421] env[62405]: _type = "Task" [ 1904.423421] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.436909] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cefc27-c210-4e23-eeed-9bf810ee2a34, 'name': SearchDatastore_Task, 'duration_secs': 0.010428} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.437575] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.437843] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1904.438113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.438273] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.438581] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1904.438842] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18fc57b7-9561-46f3-b2ef-4e0c150b1aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.446606] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1904.446784] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1904.451036] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d1090d-b842-4582-bd88-e0d95e2ae4df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.456319] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1904.456319] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52339a3b-2df8-b6e2-e0e3-1ed1ddcff66a" [ 1904.456319] env[62405]: _type = "Task" [ 1904.456319] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.459298] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947790, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.469088] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52339a3b-2df8-b6e2-e0e3-1ed1ddcff66a, 'name': SearchDatastore_Task, 'duration_secs': 0.008287} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.470126] env[62405]: DEBUG nova.network.neutron [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Updating instance_info_cache with network_info: [{"id": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "address": "fa:16:3e:85:cb:8a", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea9789f3-0d", "ovs_interfaceid": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.472328] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6409c7e-7a20-4a11-bb28-1b8ea8f79120 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.477312] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1904.477312] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528d1c19-3962-9d2f-422a-95bedc864931" [ 1904.477312] env[62405]: _type = "Task" [ 1904.477312] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.480836] env[62405]: DEBUG nova.network.neutron [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Updated VIF entry in instance network info cache for port 546c09a6-a133-400e-b556-9b225a501a58. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1904.481199] env[62405]: DEBUG nova.network.neutron [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Updating instance_info_cache with network_info: [{"id": "546c09a6-a133-400e-b556-9b225a501a58", "address": "fa:16:3e:7e:ca:43", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap546c09a6-a1", "ovs_interfaceid": "546c09a6-a133-400e-b556-9b225a501a58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.490745] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528d1c19-3962-9d2f-422a-95bedc864931, 'name': SearchDatastore_Task, 'duration_secs': 0.009356} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.490993] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.491291] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 41e5385d-f0c7-4431-8424-e60dbeebaf8e/41e5385d-f0c7-4431-8424-e60dbeebaf8e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1904.491571] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0c67c78-2760-45bb-9f7b-576b5f8cc58d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.499389] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1904.499389] env[62405]: value = "task-1947791" [ 1904.499389] env[62405]: _type = "Task" [ 1904.499389] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.507379] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.514673] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 6fcfada3-d73a-4814-bf45-d34b26d76d4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1904.514941] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1904.515100] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1904.799213] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947789, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.812728] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375623dd-b6fe-4855-bf49-341ac2655700 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.820292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15ca858-dd3b-4a94-9edf-d0255c8f43b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.850626] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f54264-c2df-4447-9073-8e81e6de916c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.857949] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96371e9f-7d89-485a-84ec-af9ec1897c4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.876474] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.959118] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947790, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.972851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.973267] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Instance network_info: |[{"id": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "address": "fa:16:3e:85:cb:8a", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea9789f3-0d", "ovs_interfaceid": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1904.973584] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Acquired lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.973790] env[62405]: DEBUG nova.network.neutron [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Refreshing network info cache for port ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1904.975110] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:cb:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea9789f3-0da8-4e46-8cde-6c9ccb5b562d', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1904.982852] env[62405]: DEBUG oslo.service.loopingcall [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1904.983958] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1904.984443] env[62405]: DEBUG oslo_concurrency.lockutils [req-c43e7e77-324f-4137-a5b5-56da84e219c4 req-c439f260-12fc-4432-adaa-5157571deafb service nova] Releasing lock "refresh_cache-41e5385d-f0c7-4431-8424-e60dbeebaf8e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.984810] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-775913a5-c8fb-4a91-8543-e09327f2e76b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.004899] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1905.004899] env[62405]: value = "task-1947792" [ 1905.004899] env[62405]: _type = "Task" [ 1905.004899] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.009122] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.018134] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947792, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.137845] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1905.138194] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41be3f2d-c02c-458e-9e30-168fb5be7842 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.145960] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1905.145960] env[62405]: value = "task-1947793" [ 1905.145960] env[62405]: _type = "Task" [ 1905.145960] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.153959] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.297449] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947789, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.472473} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.297610] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.297936] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.298958] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04558fa7-9f46-4cda-a0fc-22a1c5d27dcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.305844] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1905.305844] env[62405]: value = "task-1947794" [ 1905.305844] env[62405]: _type = "Task" [ 1905.305844] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.314333] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.379899] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1905.459095] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947790, 'name': CreateSnapshot_Task, 'duration_secs': 1.462578} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.459399] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1905.460249] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40b7087-8f89-4b11-8eab-ebef42a59771 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.517144] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947791, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.523479] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947792, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.657278] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947793, 'name': PowerOffVM_Task, 'duration_secs': 0.300025} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.657562] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1905.658709] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39e2f07-5d60-4676-b1d1-edd72c68994e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.681674] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010676a2-d2e3-484e-8c2a-2ec24e129ba8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.817532] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077161} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.817827] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1905.822183] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edf9b45-659c-4c4b-8409-1fe169d5e2e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.843775] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1905.844070] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ae3e198-c39b-4334-be2c-894b0d295baa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.861057] env[62405]: DEBUG nova.network.neutron [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Updated VIF entry in instance network info cache for port ea9789f3-0da8-4e46-8cde-6c9ccb5b562d. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1905.861411] env[62405]: DEBUG nova.network.neutron [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Updating instance_info_cache with network_info: [{"id": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "address": "fa:16:3e:85:cb:8a", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea9789f3-0d", "ovs_interfaceid": "ea9789f3-0da8-4e46-8cde-6c9ccb5b562d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.863991] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1905.863991] env[62405]: value = "task-1947795" [ 1905.863991] env[62405]: _type = "Task" [ 1905.863991] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.874239] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947795, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.884634] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1905.884936] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.416s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.885627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.978s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.980416] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1905.980741] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0b42c395-71c8-4e9b-bebe-d2e89fc0d890 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.991248] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1905.991248] env[62405]: value = "task-1947796" [ 1905.991248] env[62405]: _type = "Task" [ 1905.991248] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.998438] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.014281] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947791, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.188237} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.015197] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 41e5385d-f0c7-4431-8424-e60dbeebaf8e/41e5385d-f0c7-4431-8424-e60dbeebaf8e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1906.015461] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1906.015797] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7207916d-743b-479c-a1dc-a198d10431c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.022336] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947792, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.028775] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1906.028775] env[62405]: value = "task-1947797" [ 1906.028775] env[62405]: _type = "Task" [ 1906.028775] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.041966] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947797, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.192731] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1906.193077] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a13573c3-9466-4cb2-90a8-87b0d65606e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.200631] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1906.200631] env[62405]: value = "task-1947798" [ 1906.200631] env[62405]: _type = "Task" [ 1906.200631] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.209355] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947798, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.369037] env[62405]: DEBUG oslo_concurrency.lockutils [req-f633661c-4bae-4143-bb8e-350871825cc0 req-33db90e3-6795-484d-9746-72728edb7cdd service nova] Releasing lock "refresh_cache-3b9a6a82-a426-4802-9640-5b39e5e0ff49" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.374857] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947795, 'name': ReconfigVM_Task, 'duration_secs': 0.282007} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.375166] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.375845] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52a305a1-8bd6-4eba-b9a8-313fbf640415 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.383507] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1906.383507] env[62405]: value = "task-1947799" [ 1906.383507] env[62405]: _type = "Task" [ 1906.383507] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.391181] env[62405]: INFO nova.compute.claims [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1906.397957] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947799, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.500803] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task} progress is 93%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.520440] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947792, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.538522] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113184} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.539142] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1906.540419] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3267093-9c7d-4790-bdb5-39414fd70840 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.571922] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 41e5385d-f0c7-4431-8424-e60dbeebaf8e/41e5385d-f0c7-4431-8424-e60dbeebaf8e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.572306] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5acd9498-3958-4a79-8595-457a4ed1f826 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.593402] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1906.593402] env[62405]: value = "task-1947800" [ 1906.593402] env[62405]: _type = "Task" [ 1906.593402] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.601782] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947800, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.712206] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947798, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.892814] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947799, 'name': Rename_Task, 'duration_secs': 0.288304} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.893515] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1906.893515] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ea7a5f8-0e6d-49fa-84f5-d32c74baf579 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.899965] env[62405]: INFO nova.compute.resource_tracker [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating resource usage from migration e28e937c-0b2d-4bf2-9f78-c26841fab210 [ 1906.903765] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1906.903765] env[62405]: value = "task-1947801" [ 1906.903765] env[62405]: _type = "Task" [ 1906.903765] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.911831] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.006103] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.018905] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947792, 'name': CreateVM_Task, 'duration_secs': 1.769517} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.021544] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1907.023457] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.023457] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.023457] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1907.027020] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0be8f81a-1a9f-4e12-88d0-04818f82e63b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.029334] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1907.029334] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524db9f2-6b11-f396-3d9a-5b08efa9d608" [ 1907.029334] env[62405]: _type = "Task" [ 1907.029334] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.037892] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524db9f2-6b11-f396-3d9a-5b08efa9d608, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.102841] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947800, 'name': ReconfigVM_Task, 'duration_secs': 0.304903} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.105623] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 41e5385d-f0c7-4431-8424-e60dbeebaf8e/41e5385d-f0c7-4431-8424-e60dbeebaf8e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1907.107246] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55b945f3-161f-4d70-a506-3f2f55ee8f4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.113777] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1907.113777] env[62405]: value = "task-1947802" [ 1907.113777] env[62405]: _type = "Task" [ 1907.113777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.124483] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947802, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.186203] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0eb1a5-08c5-48aa-b25a-c061cba90ef3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.194371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafba522-190e-41f9-8d84-946f06e5d7cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.228789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46b7611-f7ce-41eb-858c-9d91849df77b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.236731] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947798, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.239906] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e7ed89-39f0-464e-9a8f-784def98e9a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.253256] env[62405]: DEBUG nova.compute.provider_tree [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1907.417781] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947801, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.506293] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.540243] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524db9f2-6b11-f396-3d9a-5b08efa9d608, 'name': SearchDatastore_Task, 'duration_secs': 0.013152} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.540632] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.541026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1907.541133] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.541296] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.541480] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1907.541744] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8129ea0a-7cc6-499d-9cd7-479b6dbbcefb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.550355] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1907.550595] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1907.551396] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9362285-7f19-4fcc-a6dd-f8576d6dd31f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.557083] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1907.557083] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfbf97-0266-28c3-e561-05ac54b87dff" [ 1907.557083] env[62405]: _type = "Task" [ 1907.557083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.566217] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfbf97-0266-28c3-e561-05ac54b87dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.623560] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947802, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.734443] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947798, 'name': CreateSnapshot_Task, 'duration_secs': 1.446146} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.734727] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1907.735614] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7750fb66-f1f6-4f4c-8ab1-d994f204fc75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.790839] env[62405]: DEBUG nova.scheduler.client.report [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 139 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1907.791121] env[62405]: DEBUG nova.compute.provider_tree [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 139 to 140 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1907.791308] env[62405]: DEBUG nova.compute.provider_tree [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1907.917960] env[62405]: DEBUG oslo_vmware.api [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947801, 'name': PowerOnVM_Task, 'duration_secs': 0.540987} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.917960] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1907.918246] env[62405]: INFO nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Took 8.80 seconds to spawn the instance on the hypervisor. [ 1907.918491] env[62405]: DEBUG nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1907.919330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28a8c1c-ece8-4f97-8846-5086c893643e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.005806] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.068454] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bfbf97-0266-28c3-e561-05ac54b87dff, 'name': SearchDatastore_Task, 'duration_secs': 0.013554} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.069330] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808a929c-ea6c-4ba1-ad17-eeaa065e2d1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.077837] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1908.077837] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529a968b-84b1-3e71-b2d6-cb1dc105088a" [ 1908.077837] env[62405]: _type = "Task" [ 1908.077837] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.088164] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529a968b-84b1-3e71-b2d6-cb1dc105088a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.125723] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947802, 'name': Rename_Task, 'duration_secs': 0.865886} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.126014] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1908.126271] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1c0ada1-3d80-4984-b057-aa57d4ce2db6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.134683] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1908.134683] env[62405]: value = "task-1947803" [ 1908.134683] env[62405]: _type = "Task" [ 1908.134683] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.143324] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947803, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.254431] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1908.254580] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ccc96b2c-f169-4a47-ac6b-ce882adad76a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.263321] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1908.263321] env[62405]: value = "task-1947804" [ 1908.263321] env[62405]: _type = "Task" [ 1908.263321] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.270984] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947804, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.296767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.411s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.297113] env[62405]: INFO nova.compute.manager [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Migrating [ 1908.303959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.588s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.305573] env[62405]: INFO nova.compute.claims [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1908.442562] env[62405]: INFO nova.compute.manager [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Took 36.99 seconds to build instance. [ 1908.507129] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947796, 'name': CloneVM_Task, 'duration_secs': 2.115337} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.507472] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Created linked-clone VM from snapshot [ 1908.508214] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0572e9f1-56d0-4d2c-8544-33a23c922653 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.516384] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Uploading image 5a25c6cc-ac07-4e8a-86c3-1be0579465dc {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1908.533763] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1908.533763] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1ce0fc85-babb-4113-89aa-ace483d32f7c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.540340] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1908.540340] env[62405]: value = "task-1947805" [ 1908.540340] env[62405]: _type = "Task" [ 1908.540340] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.549381] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947805, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.592025] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529a968b-84b1-3e71-b2d6-cb1dc105088a, 'name': SearchDatastore_Task, 'duration_secs': 0.016461} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.592025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.592025] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3b9a6a82-a426-4802-9640-5b39e5e0ff49/3b9a6a82-a426-4802-9640-5b39e5e0ff49.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1908.592025] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ea4e77a-efcf-46f9-8d20-4235741aac71 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.599509] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1908.599509] env[62405]: value = "task-1947806" [ 1908.599509] env[62405]: _type = "Task" [ 1908.599509] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.610938] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.648869] env[62405]: DEBUG oslo_vmware.api [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947803, 'name': PowerOnVM_Task, 'duration_secs': 0.487723} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.649101] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.649617] env[62405]: INFO nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 1908.649617] env[62405]: DEBUG nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1908.650396] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114d47d0-0816-4fc9-87db-7fa1e7482a25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.776024] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947804, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.821341] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.821341] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.821341] env[62405]: DEBUG nova.network.neutron [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.944571] env[62405]: DEBUG oslo_concurrency.lockutils [None req-70181c1e-4991-4002-85f3-361b8e10bea9 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.502s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.051718] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947805, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.113345] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947806, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.170858] env[62405]: INFO nova.compute.manager [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Took 37.08 seconds to build instance. [ 1909.278979] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947804, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.551397] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947805, 'name': Destroy_Task, 'duration_secs': 0.733691} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.551731] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Destroyed the VM [ 1909.552026] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1909.552326] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e39902a-7e88-4688-886d-6edf87b1646a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.558407] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1909.558407] env[62405]: value = "task-1947807" [ 1909.558407] env[62405]: _type = "Task" [ 1909.558407] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.570249] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947807, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.571209] env[62405]: DEBUG nova.network.neutron [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.610811] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947806, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.628374] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f407ff-558f-452c-945c-f4f5b348701a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.638085] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94175a3b-af67-4392-8e1b-01a576ed069f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.672062] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5f498a-43d0-451b-9d27-120f271103c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.676213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c53a7ace-30cc-43ab-9679-2e54a8736aab tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.602s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.682745] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ffe4c2-92d8-4686-a05d-303b9b594356 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.701591] env[62405]: DEBUG nova.compute.provider_tree [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.780180] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947804, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.068881] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947807, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.073681] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.117689] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947806, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.205509] env[62405]: DEBUG nova.scheduler.client.report [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1910.281042] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947804, 'name': CloneVM_Task, 'duration_secs': 1.946981} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.281543] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Created linked-clone VM from snapshot [ 1910.282362] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb15a9c-c0bb-4052-a655-bdef1264accf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.290720] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Uploading image 6ee02a73-5729-47e1-93a1-23fefdcafc1e {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1910.320205] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1910.320205] env[62405]: value = "vm-401535" [ 1910.320205] env[62405]: _type = "VirtualMachine" [ 1910.320205] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1910.320517] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e98d7e4a-b254-4e82-be74-1d22b14f2ff0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.328401] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lease: (returnval){ [ 1910.328401] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238c52a-15e7-a69b-5f63-00174c4b9fd2" [ 1910.328401] env[62405]: _type = "HttpNfcLease" [ 1910.328401] env[62405]: } obtained for exporting VM: (result){ [ 1910.328401] env[62405]: value = "vm-401535" [ 1910.328401] env[62405]: _type = "VirtualMachine" [ 1910.328401] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1910.328798] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the lease: (returnval){ [ 1910.328798] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238c52a-15e7-a69b-5f63-00174c4b9fd2" [ 1910.328798] env[62405]: _type = "HttpNfcLease" [ 1910.328798] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1910.336085] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1910.336085] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238c52a-15e7-a69b-5f63-00174c4b9fd2" [ 1910.336085] env[62405]: _type = "HttpNfcLease" [ 1910.336085] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1910.570419] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947807, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.612897] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947806, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.60544} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.613253] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3b9a6a82-a426-4802-9640-5b39e5e0ff49/3b9a6a82-a426-4802-9640-5b39e5e0ff49.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1910.613490] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1910.613760] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7dbdee4-56e6-40e1-b0c5-d1b11d0b6259 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.622231] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1910.622231] env[62405]: value = "task-1947809" [ 1910.622231] env[62405]: _type = "Task" [ 1910.622231] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.631866] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.712246] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.712785] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1910.716854] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.857s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.717122] env[62405]: DEBUG nova.objects.instance [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1910.837611] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1910.837611] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238c52a-15e7-a69b-5f63-00174c4b9fd2" [ 1910.837611] env[62405]: _type = "HttpNfcLease" [ 1910.837611] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1910.838061] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1910.838061] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5238c52a-15e7-a69b-5f63-00174c4b9fd2" [ 1910.838061] env[62405]: _type = "HttpNfcLease" [ 1910.838061] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1910.838822] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b4ad73-c019-4318-84cd-58d008f3683c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.847274] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1910.847274] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk for reading. 
{{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1910.942259] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7252b037-5cc4-4693-bd3d-3632301dcef0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.071430] env[62405]: DEBUG oslo_vmware.api [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947807, 'name': RemoveSnapshot_Task, 'duration_secs': 1.241066} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.071731] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1911.131950] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125767} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.132697] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1911.134689] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4a1441-99e4-4717-83fd-aea2cc2f0acf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.162723] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 3b9a6a82-a426-4802-9640-5b39e5e0ff49/3b9a6a82-a426-4802-9640-5b39e5e0ff49.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1911.163072] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e7746f7-cca4-46ac-ae81-d6e999adc4ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.188620] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1911.188620] env[62405]: value = "task-1947810" [ 1911.188620] env[62405]: _type = "Task" [ 1911.188620] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.198035] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947810, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.220981] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.221292] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.225392] env[62405]: DEBUG nova.compute.utils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1911.228242] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1911.228419] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1911.274312] env[62405]: DEBUG nova.policy [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9c18747ac7149dba0e1c0a8fc6c0b7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd9a1a4650b34e388c50c7575cf09a7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1911.513744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "a91a6d04-2ec0-4568-bdb3-732d148644de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.513979] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.579018] env[62405]: WARNING nova.compute.manager [None req-ad0e8743-1b34-40b4-b722-0ba93834c694 tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Image not found during snapshot: nova.exception.ImageNotFound: Image 5a25c6cc-ac07-4e8a-86c3-1be0579465dc could not be found. 
[ 1911.588299] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575950bd-89d7-4e89-80b7-0090961314a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.611616] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1911.647980] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Successfully created port: a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1911.705918] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947810, 'name': ReconfigVM_Task, 'duration_secs': 0.395499} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.706357] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 3b9a6a82-a426-4802-9640-5b39e5e0ff49/3b9a6a82-a426-4802-9640-5b39e5e0ff49.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1911.709159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-288bd278-0257-4e1c-8579-dc5cc38e4e36 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.718035] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1911.718035] env[62405]: value = "task-1947811" [ 1911.718035] env[62405]: _type = "Task" [ 1911.718035] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.727862] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947811, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.728465] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1911.736376] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1911.740280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba987cca-0cd8-4aa3-98a6-5b3a973bb27e tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.741869] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.064s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.742192] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.744365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.405s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.746170] env[62405]: INFO nova.compute.claims [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1911.777913] env[62405]: INFO nova.scheduler.client.report [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted allocations for instance 9d97bf1d-6830-48b1-831b-bf2b52188f32 [ 1912.017046] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1912.068381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.068381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.068381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.068381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.068381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.071809] env[62405]: INFO nova.compute.manager [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Terminating instance [ 1912.116647] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "0d2b305d-d754-413c-afdf-3a2e8f143891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.116929] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.119296] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1912.120863] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ceeef62-5722-4515-82c9-038831f8c310 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.130961] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1912.130961] env[62405]: value = "task-1947812" [ 1912.130961] env[62405]: _type = "Task" [ 1912.130961] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.146595] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.228109] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947811, 'name': Rename_Task, 'duration_secs': 0.17851} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.228603] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1912.228990] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24810df3-2de3-4171-90b7-2521fbf012e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.235556] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1912.235556] env[62405]: value = "task-1947813" [ 1912.235556] env[62405]: _type = "Task" [ 1912.235556] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.254835] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947813, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.272862] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.293588] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94307d94-a95d-4f55-a557-307a834f6cba tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "9d97bf1d-6830-48b1-831b-bf2b52188f32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.770s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.537840] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.576809] env[62405]: DEBUG nova.compute.manager [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1912.576809] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1912.578182] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505b341e-489b-4941-8578-eb21ec28063c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.587130] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1912.587539] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-398ffc57-99d1-4410-89ab-104f1170ab4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.594316] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1912.594316] env[62405]: value = "task-1947814" [ 1912.594316] env[62405]: _type = "Task" [ 1912.594316] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.602757] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947814, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.621783] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1912.642708] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947812, 'name': PowerOffVM_Task, 'duration_secs': 0.217859} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.643186] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1912.646300] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1912.747086] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947813, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.751265] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1912.788057] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1912.788370] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1912.788548] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1912.788769] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1912.789946] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1912.789946] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1912.790707] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1912.790874] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1912.791103] env[62405]: DEBUG nova.virt.hardware [None 
req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1912.791331] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1912.791571] env[62405]: DEBUG nova.virt.hardware [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1912.792680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeff5980-b8c3-40d9-b947-8c88df809cbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.804521] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2b520e-8ea8-4531-956c-41668902154a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.062019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a96d82-c124-454d-a579-1384b27595f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.071116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7f3a3b-fee0-4242-9587-f4ffe85dd39f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.107436] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72e5b91-f12c-48a2-9457-76a0920ad405 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.116968] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947814, 'name': PowerOffVM_Task, 'duration_secs': 0.193814} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.119393] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1913.119576] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1913.119859] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2588c897-398d-4bcf-801f-6b29e58754f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.122489] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a96fabe-63dc-4615-ac32-91a99602bcd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.139652] env[62405]: DEBUG nova.compute.provider_tree [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.152474] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1913.152831] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1913.152928] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1913.153095] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 
{{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1913.153434] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1913.153642] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1913.153803] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1913.154532] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1913.154532] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1913.154532] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1913.154721] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1913.160918] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a0c0f99-b5a5-4790-84ce-4844c9cd891a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.171789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.178400] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1913.178400] env[62405]: value = "task-1947816" [ 1913.178400] 
env[62405]: _type = "Task" [ 1913.178400] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.188287] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947816, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.238511] env[62405]: DEBUG nova.compute.manager [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Received event network-vif-plugged-a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1913.238719] env[62405]: DEBUG oslo_concurrency.lockutils [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.238925] env[62405]: DEBUG oslo_concurrency.lockutils [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.239120] env[62405]: DEBUG oslo_concurrency.lockutils [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.239289] env[62405]: DEBUG nova.compute.manager [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] No waiting events found dispatching network-vif-plugged-a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1913.239452] env[62405]: WARNING nova.compute.manager [req-6a41918c-0582-4b5e-8c84-c8b2e974166b req-957361ba-35f7-492b-804d-c7aa8a8a9c02 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Received unexpected event network-vif-plugged-a7c7d269-027f-42d9-819a-e04ab445d816 for instance with vm_state building and task_state spawning. [ 1913.250564] env[62405]: DEBUG oslo_vmware.api [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947813, 'name': PowerOnVM_Task, 'duration_secs': 0.655395} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.250830] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1913.251044] env[62405]: INFO nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Took 10.76 seconds to spawn the instance on the hypervisor. [ 1913.251230] env[62405]: DEBUG nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1913.251961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3e8963-bd34-4527-81e7-68ce0f934f0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.351052] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Successfully updated port: a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1913.464614] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1913.464794] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1913.465069] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleting the datastore file [datastore1] 06dbb3e0-876e-4290-81f5-6f95f9d5cb37 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1913.465400] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd4236e4-bba6-4e39-b5d5-91631ec82a06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.475036] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1913.475036] env[62405]: value = "task-1947817" [ 1913.475036] env[62405]: _type = "Task" [ 1913.475036] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.483549] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947817, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.644909] env[62405]: DEBUG nova.scheduler.client.report [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1913.688821] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947816, 'name': ReconfigVM_Task, 'duration_secs': 0.418254} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.689161] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1913.768370] env[62405]: INFO nova.compute.manager [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Took 41.60 seconds to build instance. 
[ 1913.853847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.854075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.854261] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.985182] env[62405]: DEBUG oslo_vmware.api [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450692} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.985467] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1913.985653] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1913.985867] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1913.986076] env[62405]: INFO nova.compute.manager [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Took 1.41 seconds to destroy the instance on the hypervisor. [ 1913.986326] env[62405]: DEBUG oslo.service.loopingcall [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.986518] env[62405]: DEBUG nova.compute.manager [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1913.986613] env[62405]: DEBUG nova.network.neutron [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1914.152355] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.152355] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1914.153109] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.238s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.153351] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.155313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.841s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.155563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.157674] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.736s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.157881] env[62405]: DEBUG nova.objects.instance [None 
req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1914.179107] env[62405]: INFO nova.scheduler.client.report [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Deleted allocations for instance 4c8c0d2f-d8d3-4422-8a5c-8999636b22be [ 1914.182350] env[62405]: INFO nova.scheduler.client.report [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted allocations for instance 2c623c00-92f2-4cc4-8503-963c3308d708 [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 
tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1914.202016] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1914.202488] env[62405]: DEBUG nova.virt.hardware [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1914.211350] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfiguring VM instance instance-00000053 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1914.212665] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3c33a98-bba8-4075-9b9b-76af58a15d41 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.244543] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1914.244543] env[62405]: value = "task-1947818" [ 1914.244543] env[62405]: _type = "Task" [ 1914.244543] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.261038] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947818, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.271195] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ae54bffa-57a4-4e72-b9a6-70e7bd0b7b4c tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.132s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.355237] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.355487] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.384431] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.510881] env[62405]: DEBUG nova.network.neutron [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.662736] env[62405]: DEBUG nova.compute.utils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1914.667195] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1914.667390] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1914.688231] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0d7e6b08-7f72-4b04-9aac-3f761d272373 tempest-ServersTestJSON-228240997 tempest-ServersTestJSON-228240997-project-member] Lock "4c8c0d2f-d8d3-4422-8a5c-8999636b22be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.063s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.691750] env[62405]: DEBUG oslo_concurrency.lockutils [None req-acac08d9-89b6-4d95-b005-8df87420e172 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "2c623c00-92f2-4cc4-8503-963c3308d708" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.201s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.707290] env[62405]: DEBUG nova.policy [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d60f47dfb7e4334b9b7ceb5d3c6aaab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cfe90f16b140018a5802c02f751d9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1914.754532] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.785103] env[62405]: DEBUG nova.network.neutron [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.859795] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1915.015582] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.015995] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Instance network_info: |[{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1915.016488] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.016752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.017020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.017236] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock 
"3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.017421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.019262] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:9c:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7c7d269-027f-42d9-819a-e04ab445d816', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1915.026835] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating folder: Project (dd9a1a4650b34e388c50c7575cf09a7c). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1915.027447] env[62405]: INFO nova.compute.manager [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Terminating instance [ 1915.029399] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a07318a8-a3e7-4534-9338-72f7bf27ddf2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.041891] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created folder: Project (dd9a1a4650b34e388c50c7575cf09a7c) in parent group-v401284. [ 1915.042120] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating folder: Instances. Parent ref: group-v401536. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1915.042383] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e2b1955-55c4-483a-bfce-b0b9000300f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.051671] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created folder: Instances in parent group-v401536. 
[ 1915.052032] env[62405]: DEBUG oslo.service.loopingcall [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.052148] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1915.052376] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-346184e9-2f9d-4a2e-80fc-4021fba997a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.072891] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1915.072891] env[62405]: value = "task-1947821" [ 1915.072891] env[62405]: _type = "Task" [ 1915.072891] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.088447] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.143498] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Successfully created port: e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1915.168565] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1915.172336] env[62405]: DEBUG oslo_concurrency.lockutils [None req-498743dc-41af-4c87-aa82-0c815e0fe830 tempest-ServersAdmin275Test-1825578340 tempest-ServersAdmin275Test-1825578340-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.173732] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.729s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.173939] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.176163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.881s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.176537] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.180734] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.938s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.180980] env[62405]: DEBUG nova.objects.instance [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lazy-loading 'resources' on Instance uuid 8f133517-cff2-40c7-8333-a9116163313a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.211973] env[62405]: INFO nova.scheduler.client.report [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Deleted allocations for instance 59fe34ab-c01d-4083-8bcd-ad6b4133a66f [ 1915.213813] env[62405]: INFO nova.scheduler.client.report [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance 08d7be6c-0557-46af-ae8d-e1c68e878cae [ 1915.256414] env[62405]: DEBUG oslo_vmware.api [None 
req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947818, 'name': ReconfigVM_Task, 'duration_secs': 0.726268} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.257608] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfigured VM instance instance-00000053 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1915.258465] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affb5dc3-250c-40c7-b332-ba0be1ddd54c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.289302] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1915.292068] env[62405]: INFO nova.compute.manager [-] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Took 1.31 seconds to deallocate network for instance. [ 1915.292341] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7d76c12-77cf-4269-bc5a-1dd9fd093d5a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.311836] env[62405]: DEBUG nova.compute.manager [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Received event network-changed-a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1915.312135] env[62405]: DEBUG nova.compute.manager [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Refreshing instance network info cache due to event network-changed-a7c7d269-027f-42d9-819a-e04ab445d816. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1915.312422] env[62405]: DEBUG oslo_concurrency.lockutils [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.312652] env[62405]: DEBUG oslo_concurrency.lockutils [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.312846] env[62405]: DEBUG nova.network.neutron [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Refreshing network info cache for port a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1915.322108] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1915.322108] env[62405]: value = "task-1947822" [ 1915.322108] env[62405]: _type = "Task" [ 1915.322108] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.336019] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947822, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.388517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.534394] env[62405]: DEBUG nova.compute.manager [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1915.534671] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1915.535686] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48103c84-9502-45b0-971f-4daadd9887a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.546938] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1915.546938] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4ff1ab4-5140-4e2a-9ef7-7c1b37938e77 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.553397] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1915.553397] env[62405]: value = "task-1947823" [ 1915.553397] env[62405]: _type = "Task" [ 1915.553397] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.561809] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.586124] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.728764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a0a5dfdf-88bc-494a-976e-f5f89edf5182 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "08d7be6c-0557-46af-ae8d-e1c68e878cae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.870s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.730413] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9aabdcb4-ff70-48a4-a746-e13602763515 tempest-ImagesTestJSON-1176465240 tempest-ImagesTestJSON-1176465240-project-member] Lock "59fe34ab-c01d-4083-8bcd-ad6b4133a66f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.617s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.824889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.841415] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947822, 'name': ReconfigVM_Task, 'duration_secs': 0.39035} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.842017] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2/81aebf11-5d80-4a86-b232-3ecc5f3892c2.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1915.843349] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1916.067098] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947823, 'name': PowerOffVM_Task, 'duration_secs': 0.22644} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.067249] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1916.067462] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1916.069058] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e257ee32-4b28-4fe7-8e2e-f904c57bb837 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.072143] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5d7ba5-96be-44d5-ac9b-9cbd1cf6adb9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.089771] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a0821e-641c-4b77-944e-ac3fc0cb575f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.093757] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.135317] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7744e8ef-d9f5-4eae-838f-c7237a07636e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.144587] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e726ba03-eddd-4319-9c4d-d6cfb522be16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.159739] env[62405]: DEBUG nova.compute.provider_tree [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1916.180567] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1916.212781] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1916.213092] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1916.213264] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1916.213462] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1916.213627] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1916.213777] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1916.214011] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1916.214248] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1916.214367] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1916.214554] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1916.214732] env[62405]: DEBUG nova.virt.hardware [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1916.215634] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c4f334-1008-4ca3-9a04-9150fa3c54fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.224865] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532383d5-2b19-49db-88bf-4afa2bbc87ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.270256] env[62405]: DEBUG nova.network.neutron [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updated VIF entry in instance network info cache for port a7c7d269-027f-42d9-819a-e04ab445d816. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1916.270670] env[62405]: DEBUG nova.network.neutron [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.357185] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44907e52-f363-44d9-8fb7-83efaa9a0d0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.395173] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2928e8a-4696-4a5d-9049-bd118d977c55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.421659] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1916.593053] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.593670] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.595040] env[62405]: 
DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.683021] env[62405]: ERROR nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] [req-d987f415-e1a5-4cda-931b-7d4f35bb9174] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d987f415-e1a5-4cda-931b-7d4f35bb9174"}]} [ 1916.703811] env[62405]: DEBUG nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1916.721644] env[62405]: DEBUG nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1916.721644] env[62405]: DEBUG nova.compute.provider_tree [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1916.735032] env[62405]: DEBUG nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1916.759122] env[62405]: DEBUG nova.scheduler.client.report [None 
req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1916.774275] env[62405]: DEBUG oslo_concurrency.lockutils [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.774275] env[62405]: DEBUG nova.compute.manager [req-2e715e98-886f-432b-b5f3-63f0ea129596 req-51a0a100-33b0-483e-9683-980c2b942395 service nova] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Received event network-vif-deleted-339b22b0-3451-4284-a022-8823b059c08d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1916.873850] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.874161] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.987293] env[62405]: DEBUG nova.network.neutron [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Port af199d5b-90da-4443-ac9d-e8d6bf721a80 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1917.093915] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.099112] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1917.100988] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a75840-b3af-47b7-8511-d0e36af1b58a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.113238] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c22d1fd-d902-47f6-a1d9-1ab28f15f545 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.151296] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8275002c-0c87-4922-b711-836c2cfd0458 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.160720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e695b2-db06-40e6-90a9-365786623479 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.177736] env[62405]: DEBUG nova.compute.provider_tree [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1917.380576] env[62405]: INFO nova.compute.manager [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Detaching volume dd61f776-0703-457f-8823-3fc5792787a0 [ 1917.421237] env[62405]: INFO nova.virt.block_device [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Attempting to driver detach volume dd61f776-0703-457f-8823-3fc5792787a0 from mountpoint /dev/sdb [ 1917.421518] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1917.421741] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401433', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'name': 'volume-dd61f776-0703-457f-8823-3fc5792787a0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67bf25ea-5774-4246-a3e6-2aeb0ebf6731', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'serial': 'dd61f776-0703-457f-8823-3fc5792787a0'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1917.422663] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86541734-0cc1-4bee-8c2c-75a2c75cecc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.451866] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1d3004-8add-431c-8540-702c076f94f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.460366] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d817a06e-89df-4bb1-a84d-2de88df4af25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.484213] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06533e5f-85e8-4750-82b4-43a8953dff62 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.507211] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] The volume has not been displaced from its original location: [datastore1] volume-dd61f776-0703-457f-8823-3fc5792787a0/volume-dd61f776-0703-457f-8823-3fc5792787a0.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1917.513106] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfiguring VM instance instance-00000024 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1917.513106] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92a11d1c-9c69-4d56-8302-de5f1016d463 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.532061] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1917.532061] env[62405]: value = "task-1947825" [ 1917.532061] env[62405]: _type = "Task" [ 1917.532061] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.546914] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947825, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.594976] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.757765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.775424] env[62405]: DEBUG nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1917.775424] env[62405]: DEBUG nova.compute.provider_tree [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 145 to 146 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1917.775424] env[62405]: DEBUG nova.compute.provider_tree [None 
req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1918.028993] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.029825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.029825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.042619] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947825, 'name': ReconfigVM_Task, 'duration_secs': 0.323098} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.043789] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Reconfigured VM instance instance-00000024 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1918.048973] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-989e4527-d5c9-4b36-b8b2-dd7c25149a06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.065358] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1918.065358] env[62405]: value = "task-1947826" [ 1918.065358] env[62405]: _type = "Task" [ 1918.065358] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.074391] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947826, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.088711] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.281556] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.100s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.285900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.012s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.286789] env[62405]: INFO nova.compute.claims [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1918.319558] env[62405]: INFO nova.scheduler.client.report [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Deleted allocations for instance 8f133517-cff2-40c7-8333-a9116163313a [ 1918.579813] env[62405]: DEBUG oslo_vmware.api [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947826, 'name': ReconfigVM_Task, 'duration_secs': 0.146243} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.579813] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401433', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'name': 'volume-dd61f776-0703-457f-8823-3fc5792787a0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67bf25ea-5774-4246-a3e6-2aeb0ebf6731', 'attached_at': '', 'detached_at': '', 'volume_id': 'dd61f776-0703-457f-8823-3fc5792787a0', 'serial': 'dd61f776-0703-457f-8823-3fc5792787a0'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1918.590814] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.646025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.646025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.829767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b4ff42ea-2473-423d-9440-c7342157735f tempest-ServersAdmin275Test-1749089324 tempest-ServersAdmin275Test-1749089324-project-member] Lock "8f133517-cff2-40c7-8333-a9116163313a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.824s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.070365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.070580] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.070765] env[62405]: DEBUG nova.network.neutron [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1919.092034] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.131178] env[62405]: DEBUG nova.objects.instance [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1919.150405] env[62405]: DEBUG nova.compute.utils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1919.573012] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a778f90-9941-4168-9dad-0c74961453e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.587019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23260fab-6238-4b29-ac73-1a0c59430caa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.599022] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.628541] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64234fb8-adf1-4c2f-8ba3-7668eddef918 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.638308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef38c955-8faa-4061-a6ac-eeb9ce7de3ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.654823] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.655686] env[62405]: DEBUG nova.compute.provider_tree [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.843525] env[62405]: DEBUG nova.network.neutron [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.093708] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.145137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ad51670-e052-4e7f-86d3-62f4d57080f7 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.271s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.158939] env[62405]: DEBUG nova.scheduler.client.report [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1920.346221] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.599129] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.664103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.664598] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1920.668759] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.130s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.669646] env[62405]: INFO nova.compute.claims [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1920.745014] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.745166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.745449] env[62405]: INFO nova.compute.manager [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Attaching volume 3e3abe30-bd3e-4a0a-a97d-e8583b031955 to /dev/sdb [ 1920.793184] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6047bb-8670-4ebc-97c6-a32fc5741ee0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.801310] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564647c3-4d2d-4555-9300-37e0a9d77047 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.821363] env[62405]: DEBUG nova.virt.block_device [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac 
tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating existing volume attachment record: 418bd473-326d-4507-9db8-05e80dc546b6 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1920.825140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.825531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.825804] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.826135] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.826760] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.828993] env[62405]: INFO nova.compute.manager [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Terminating instance [ 1920.876864] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144b8542-23fe-462e-a91f-36017fd3b5e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.898223] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc28e59-485e-4927-aa26-c5c60f9d261d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.905699] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1920.989604] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1920.990562] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4edc46-9e9c-41f5-bd7f-03cac15fbc92 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.997885] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1920.997885] env[62405]: ERROR oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk due to incomplete transfer. [ 1920.997885] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-11193ea7-0fd1-4624-b8d8-48a7929f9ec3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.005431] env[62405]: DEBUG oslo_vmware.rw_handles [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5201cff7-3ef6-277f-73c5-5a0dce72e54c/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1921.005431] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Uploaded image 6ee02a73-5729-47e1-93a1-23fefdcafc1e to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1921.008530] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1921.008530] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2059655e-5eb0-47f0-a8b4-e1770728bb96 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.013642] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1921.013642] env[62405]: value = "task-1947828" [ 1921.013642] env[62405]: _type = "Task" [ 1921.013642] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.022142] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947828, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.095015] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.174153] env[62405]: DEBUG nova.compute.utils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1921.178975] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1921.178975] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1921.221882] env[62405]: DEBUG nova.policy [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ccaab252cb403bb54364c35d6dcbd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d2ff9a8cb1840889a4a2a87c663f59e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1921.332563] env[62405]: DEBUG nova.compute.manager [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1921.332798] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1921.333697] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fa6f7f-4db9-44e1-8572-0d8d9d0980a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.341353] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1921.342049] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4cf3271-7dbf-4085-8278-d3f988a8f4e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.348311] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1921.348311] env[62405]: value = "task-1947831" [ 1921.348311] env[62405]: _type = "Task" [ 1921.348311] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.357711] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.416811] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1921.416811] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97a887f5-19b9-474f-809c-b45381e1195a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.426083] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1921.426083] env[62405]: value = "task-1947832" [ 1921.426083] env[62405]: _type = "Task" [ 1921.426083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.442675] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947832, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.503420] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Successfully created port: 00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1921.524175] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947828, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.600548] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.682793] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1921.733340] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1921.733570] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1921.734877] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] 3b9a6a82-a426-4802-9640-5b39e5e0ff49 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1921.734877] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eec782e0-783f-4dbb-939a-caee6ea04101 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.741346] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1921.741346] env[62405]: value = "task-1947833" [ 1921.741346] env[62405]: _type = "Task" [ 1921.741346] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.758741] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.860713] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947831, 'name': PowerOffVM_Task, 'duration_secs': 0.306951} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.861231] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1921.861496] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1921.861773] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e95ba843-a466-4acc-a89b-c959c8adc128 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.936902] env[62405]: DEBUG oslo_vmware.api [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947832, 'name': PowerOnVM_Task, 'duration_secs': 0.400898} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.940534] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1921.940743] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7d658d-85cf-4c1f-b5d9-0ffe7c9aee52 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance '81aebf11-5d80-4a86-b232-3ecc5f3892c2' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1922.032832] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947828, 'name': Destroy_Task, 'duration_secs': 1.003099} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.032832] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Destroyed the VM [ 1922.032832] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1922.033109] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f207a4e7-e192-46d3-8cc2-a2e4991007e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.044021] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1922.044021] env[62405]: value = "task-1947835" [ 1922.044021] env[62405]: _type = "Task" [ 1922.044021] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.051857] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb686280-f084-45ad-8252-46fde7638749 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.059033] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947835, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.059176] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1922.059374] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1922.059465] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleting the datastore file [datastore1] 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1922.060171] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf3f2375-7022-4735-99db-2d6557a07321 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.065199] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cba8d6a-6e11-40e0-8d68-e4e5b13db564 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.070047] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1922.070047] env[62405]: value = "task-1947836" [ 1922.070047] env[62405]: _type = "Task" [ 1922.070047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.104723] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ff9196-f670-4e05-99c8-3f861b365862 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.112851] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.118624] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.125805] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.126268] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.131125] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218ac4a5-aa0e-4940-8b8f-f4ce35d8d176 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.144905] env[62405]: DEBUG nova.compute.provider_tree [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1922.250721] env[62405]: DEBUG oslo_vmware.api [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252274} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.250985] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.251194] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1922.251772] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1922.251772] env[62405]: INFO nova.compute.manager [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Took 6.72 seconds to destroy the instance on the hypervisor. 
[ 1922.251772] env[62405]: DEBUG oslo.service.loopingcall [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1922.251933] env[62405]: DEBUG nova.compute.manager [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1922.252039] env[62405]: DEBUG nova.network.neutron [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1922.556321] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947835, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.581581] env[62405]: DEBUG oslo_vmware.api [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174721} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.581770] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.581973] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1922.582299] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1922.582516] env[62405]: INFO nova.compute.manager [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1922.582763] env[62405]: DEBUG oslo.service.loopingcall [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1922.582973] env[62405]: DEBUG nova.compute.manager [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1922.583168] env[62405]: DEBUG nova.network.neutron [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1922.610124] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947821, 'name': CreateVM_Task, 'duration_secs': 7.061779} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.615023] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1922.615023] env[62405]: DEBUG nova.compute.manager [req-1e17498b-8fdb-43dd-8c8d-48c6cd8279d1 req-357ef731-393a-4f52-a2e6-5b426fe89237 service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Received event network-vif-deleted-ea9789f3-0da8-4e46-8cde-6c9ccb5b562d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1922.615023] env[62405]: INFO nova.compute.manager [req-1e17498b-8fdb-43dd-8c8d-48c6cd8279d1 req-357ef731-393a-4f52-a2e6-5b426fe89237 service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Neutron deleted interface ea9789f3-0da8-4e46-8cde-6c9ccb5b562d; detaching it from the instance and deleting it from the info cache [ 1922.615023] env[62405]: DEBUG nova.network.neutron [req-1e17498b-8fdb-43dd-8c8d-48c6cd8279d1 req-357ef731-393a-4f52-a2e6-5b426fe89237 service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.615023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.615023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.615657] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1922.615994] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aa2b95f-3b07-45f0-8091-38232528db4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1922.621330] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1922.621330] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19f49-1978-2e66-4ced-724172a6dc45" [ 1922.621330] env[62405]: _type = "Task" [ 1922.621330] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.640381] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1922.646735] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19f49-1978-2e66-4ced-724172a6dc45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.648705] env[62405]: DEBUG nova.scheduler.client.report [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1922.692006] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1922.732417] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1922.732417] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1922.732417] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1922.732417] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1922.732417] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1922.732750] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1922.732786] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1922.732967] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1922.733193] env[62405]: DEBUG nova.virt.hardware [None 
req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1922.733437] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1922.733571] env[62405]: DEBUG nova.virt.hardware [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1922.734575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0fd264-353c-400e-9c46-96a2e4e27f2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.743954] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfe7453-cd23-42fe-ab31-e612f25f4bf2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.026963] env[62405]: DEBUG nova.network.neutron [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.054955] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947835, 'name': RemoveSnapshot_Task, 'duration_secs': 0.829542} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.055529] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1923.055853] env[62405]: DEBUG nova.compute.manager [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1923.056985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faeed863-6b51-4017-a9fc-b00a605b187f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.117713] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5dae75e-e1c7-4674-ba97-33bb10a9f813 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.133319] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb7ded1-06dd-4597-b295-7034363c253c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.153452] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19f49-1978-2e66-4ced-724172a6dc45, 'name': SearchDatastore_Task, 'duration_secs': 0.01701} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.154418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.155388] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1923.160436] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.163019] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1923.163019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.163019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.163019] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1923.165043] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.993s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.168972] env[62405]: INFO nova.compute.claims [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1923.172862] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40e6406b-f8a7-4543-91e8-9f88bbda3e07 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.192498] env[62405]: DEBUG nova.compute.manager [req-1e17498b-8fdb-43dd-8c8d-48c6cd8279d1 req-357ef731-393a-4f52-a2e6-5b426fe89237 service nova] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Detach interface failed, port_id=ea9789f3-0da8-4e46-8cde-6c9ccb5b562d, reason: Instance 3b9a6a82-a426-4802-9640-5b39e5e0ff49 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1923.200655] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1923.200852] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1923.201646] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b55e33d-3771-4fdd-95cb-87cf2452ea3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.209248] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1923.209248] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b31775-4d3f-5629-5815-e544cbfcf537" [ 1923.209248] env[62405]: _type = "Task" [ 1923.209248] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.219338] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b31775-4d3f-5629-5815-e544cbfcf537, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.219421] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.485999] env[62405]: DEBUG nova.compute.manager [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Received event network-vif-plugged-00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1923.486130] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] Acquiring lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.486355] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.486537] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.486718] env[62405]: DEBUG nova.compute.manager [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] No waiting events found dispatching network-vif-plugged-00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1923.486903] env[62405]: WARNING nova.compute.manager [req-1f18d7e1-09b5-48b6-b7d1-75f81fef3737 req-82b14510-3ebc-460c-bf9d-dd46cd0d3f54 service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Received unexpected event network-vif-plugged-00aa4b00-fea2-4a08-bb0e-29da525135b9 for instance with vm_state building and task_state spawning. [ 1923.530356] env[62405]: INFO nova.compute.manager [-] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Took 1.28 seconds to deallocate network for instance. 
[ 1923.570166] env[62405]: INFO nova.compute.manager [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Shelve offloading [ 1923.593104] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Successfully updated port: 00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1923.661278] env[62405]: DEBUG nova.compute.utils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1923.663459] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1923.663729] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1923.721837] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b31775-4d3f-5629-5815-e544cbfcf537, 'name': SearchDatastore_Task, 'duration_secs': 0.016952} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.722810] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2488db-f04e-49b8-9e9f-77f91ad629e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.728713] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1923.728713] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b38936-8482-a7ab-d0a0-1d26e8cc3b4b" [ 1923.728713] env[62405]: _type = "Task" [ 1923.728713] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.738457] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b38936-8482-a7ab-d0a0-1d26e8cc3b4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.796592] env[62405]: DEBUG nova.policy [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eea29f093ad409eb10eb3b50c194ff3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7541d8c77a3f434094bc30a4d402bfcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1923.986025] env[62405]: DEBUG nova.network.neutron [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.037015] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.074439] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1924.074755] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ce98cb4-3289-4b47-8616-d1decabe72e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.082343] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1924.082343] env[62405]: value = "task-1947838" [ 1924.082343] env[62405]: _type = "Task" [ 1924.082343] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.091676] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947838, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.095579] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.095718] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.095857] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.132722] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.133195] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.133494] env[62405]: DEBUG nova.compute.manager [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Going to confirm migration 5 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1924.169775] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1924.242590] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b38936-8482-a7ab-d0a0-1d26e8cc3b4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01079} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.242885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.243165] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1924.243437] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d419558-09e7-444b-be39-d1de56adab30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.250626] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1924.250626] env[62405]: value = "task-1947839" [ 1924.250626] env[62405]: _type = "Task" [ 1924.250626] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.251533] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Successfully updated port: e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1924.270301] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.280287] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Successfully created port: 744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1924.488475] env[62405]: INFO nova.compute.manager [-] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Took 1.91 seconds to deallocate network for instance. 
[ 1924.563202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3676e6-5562-4029-b1a0-ef9ca67e98ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.573610] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bd95e8-38bc-4ebd-b535-b085972d4b53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.614399] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c046c9-84a5-4519-b04d-af9eedda9948 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.629984] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1924.630252] env[62405]: DEBUG nova.compute.manager [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1924.631435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1aac7b7-9d33-4380-a75d-60be9a74c3d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.635161] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248c2a50-9587-4c78-b58a-655a9fcc6c0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.644607] env[62405]: DEBUG nova.compute.manager [req-7c9422d0-61cc-43dc-888f-2cbe4b06783f req-f6bc3ea1-fef7-4ebd-a629-dbdc12438ad2 service nova] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Received event network-vif-deleted-165104f7-de0a-47de-a4a4-918b51216f4d {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1924.647887] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1924.659867] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.660118] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.660322] env[62405]: DEBUG nova.network.neutron [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.661885] env[62405]: DEBUG nova.compute.provider_tree [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1924.732386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.732604] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.732966] env[62405]: DEBUG nova.network.neutron [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.732966] env[62405]: DEBUG nova.objects.instance [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'info_cache' on Instance uuid 81aebf11-5d80-4a86-b232-3ecc5f3892c2 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.765551] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.765692] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.765849] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1924.767194] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947839, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.835447] env[62405]: DEBUG nova.network.neutron [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Updating instance_info_cache with network_info: [{"id": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "address": "fa:16:3e:26:35:78", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aa4b00-fe", "ovs_interfaceid": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.997898] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.166454] env[62405]: DEBUG nova.scheduler.client.report [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1925.179865] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1925.206108] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1925.206326] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1925.206501] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1925.206712] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1925.206864] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1925.207027] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1925.207270] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 
tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1925.207465] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1925.207653] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1925.207822] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1925.208022] env[62405]: DEBUG nova.virt.hardware [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1925.208849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51a4dc3-ad9d-4dfc-b01b-74a19ebaea90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.216891] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048ec37d-c887-4cee-a534-82279b3235ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.262361] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57939} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.262624] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1925.262830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1925.263076] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93ece337-e492-4c05-8a9f-597886e8e471 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.271097] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1925.271097] env[62405]: value = "task-1947840" [ 1925.271097] env[62405]: _type = "Task" [ 1925.271097] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.279108] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.319140] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1925.341197] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.341197] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Instance network_info: |[{"id": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "address": "fa:16:3e:26:35:78", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aa4b00-fe", "ovs_interfaceid": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1925.341197] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:35:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00aa4b00-fea2-4a08-bb0e-29da525135b9', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1925.349709] env[62405]: DEBUG oslo.service.loopingcall [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1925.352751] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1925.353067] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff1663af-297c-4e8f-a607-a1c46629ec07 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.369560] env[62405]: DEBUG nova.network.neutron [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.374068] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1925.374068] env[62405]: value = "task-1947841" [ 1925.374068] env[62405]: _type = "Task" [ 1925.374068] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.383021] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947841, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.480552] env[62405]: DEBUG nova.network.neutron [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.522969] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Received event network-changed-00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1925.523172] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Refreshing instance network info cache due to event network-changed-00aa4b00-fea2-4a08-bb0e-29da525135b9. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1925.523387] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Acquiring lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.523532] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Acquired lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.523692] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Refreshing network info cache for port 00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1925.671374] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.671894] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1925.674365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.287s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.681016] env[62405]: INFO nova.compute.claims [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1925.781444] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06478} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.781704] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1925.782468] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c816e61d-e388-4096-8d82-290c76a60083 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.805707] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1925.806678] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Successfully updated port: 744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1925.807991] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb78542f-f169-4dda-9190-f024f635252c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.829522] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1925.829522] env[62405]: value = "task-1947842" [ 1925.829522] env[62405]: _type = "Task" [ 1925.829522] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.837454] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947842, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.872546] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.875118] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1925.875366] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401540', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'name': 'volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd186b2f4-3fd1-44be-b8a4-080972aff3a0', 'attached_at': '', 'detached_at': '', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'serial': '3e3abe30-bd3e-4a0a-a97d-e8583b031955'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1925.876286] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b18308e-a45e-44be-8c8d-a40ead6fea1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.887216] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947841, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.903562] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551764da-5320-4a2c-95d0-dd299d42dc8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.931521] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955/volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1925.933888] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88aacd25-5785-4753-91e9-ddcd2f3432b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.954206] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1925.954206] env[62405]: value = "task-1947843" [ 1925.954206] env[62405]: _type = "Task" [ 1925.954206] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.963234] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947843, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.983587] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.983816] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance network_info: |[{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1925.984318] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:09:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e84f02c8-cde2-4f59-88cd-ef80e8cc1bba', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1925.992231] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating folder: Project (28cfe90f16b140018a5802c02f751d9c). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1925.994722] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63dc8ffa-8019-4d23-958f-6726cb5cbb50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.009616] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created folder: Project (28cfe90f16b140018a5802c02f751d9c) in parent group-v401284. [ 1926.009847] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating folder: Instances. Parent ref: group-v401542. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1926.010129] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a771647-fc69-4e96-972e-6794984ff80c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.021877] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created folder: Instances in parent group-v401542. [ 1926.021877] env[62405]: DEBUG oslo.service.loopingcall [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.021877] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1926.021877] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94cdaca5-4dc2-42d3-9498-36c1291e023c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.035929] env[62405]: DEBUG nova.network.neutron [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [{"id": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "address": "fa:16:3e:80:08:de", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf199d5b-90", "ovs_interfaceid": "af199d5b-90da-4443-ac9d-e8d6bf721a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.045126] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1926.045126] env[62405]: value = "task-1947846" [ 1926.045126] env[62405]: _type = "Task" [ 1926.045126] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.055548] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947846, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.181912] env[62405]: DEBUG nova.compute.utils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1926.185861] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1926.186075] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1926.233494] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1926.234727] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a965fae-bd51-4498-bf93-77c504b28cea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.242709] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1926.244065] env[62405]: DEBUG nova.policy [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eea29f093ad409eb10eb3b50c194ff3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7541d8c77a3f434094bc30a4d402bfcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1926.245367] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b4e3075-3a6d-4a1a-a90f-9ca2273a6a1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.323511] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.323825] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.323825] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Building network info cache for instance {{(pid=62405) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1926.340480] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947842, 'name': ReconfigVM_Task, 'duration_secs': 0.296041} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.340795] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfigured VM instance instance-0000005c to attach disk [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1926.341522] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09bbdd4b-7d13-474c-be19-d49b77ffe2bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.353943] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1926.354391] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1926.354481] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleting the datastore file [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.359215] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1926.359215] env[62405]: value = "task-1947848" [ 1926.359215] env[62405]: _type = "Task" [ 1926.359215] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.359526] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a38da0c-5224-4fa4-8136-0e9758a9c734 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.370680] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947848, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.372427] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1926.372427] env[62405]: value = "task-1947849" [ 1926.372427] env[62405]: _type = "Task" [ 1926.372427] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.392746] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947841, 'name': CreateVM_Task, 'duration_secs': 0.682984} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.393060] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947849, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.393242] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1926.395307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.395435] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.396141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1926.396412] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-776ab13f-7027-48ac-8bf4-5038cebd7959 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.401430] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1926.401430] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52227ca6-bab6-1191-dad3-7103b39de686" [ 1926.401430] env[62405]: _type = "Task" [ 1926.401430] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.410858] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52227ca6-bab6-1191-dad3-7103b39de686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.459885] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Updated VIF entry in instance network info cache for port 00aa4b00-fea2-4a08-bb0e-29da525135b9. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1926.460399] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Updating instance_info_cache with network_info: [{"id": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "address": "fa:16:3e:26:35:78", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00aa4b00-fe", "ovs_interfaceid": "00aa4b00-fea2-4a08-bb0e-29da525135b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.468084] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947843, 'name': ReconfigVM_Task, 'duration_secs': 0.391187} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.468084] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfigured VM instance instance-0000003d to attach disk [datastore1] volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955/volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1926.473363] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2591401a-f8d2-44a7-b26a-031027a745f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.489667] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1926.489667] env[62405]: value = "task-1947850" [ 1926.489667] env[62405]: _type = "Task" [ 1926.489667] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.505435] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947850, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.533814] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Successfully created port: b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1926.540462] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-81aebf11-5d80-4a86-b232-3ecc5f3892c2" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.540835] env[62405]: DEBUG nova.objects.instance [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'migration_context' on Instance uuid 81aebf11-5d80-4a86-b232-3ecc5f3892c2 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1926.554086] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947846, 'name': CreateVM_Task, 'duration_secs': 0.365492} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.554302] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1926.555231] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.689059] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1926.871835] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947848, 'name': Rename_Task, 'duration_secs': 0.157721} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.872599] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1926.876681] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1926.877164] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86d5c2c5-5aaf-46b6-a67a-ce980de8dba0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.886879] env[62405]: DEBUG oslo_vmware.api [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180946} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.890055] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1926.890247] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1926.890465] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1926.892196] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1926.892196] env[62405]: value = "task-1947851" [ 1926.892196] env[62405]: _type = "Task" [ 1926.892196] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.903682] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947851, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.909494] env[62405]: INFO nova.scheduler.client.report [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted allocations for instance f16e3d13-6db6-4f61-b0e4-661856a9166b [ 1926.918154] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52227ca6-bab6-1191-dad3-7103b39de686, 'name': SearchDatastore_Task, 'duration_secs': 0.018383} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.922892] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.923186] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1926.923478] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.923644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.923828] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1926.925103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.925103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1926.925103] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0bd0a9ce-75ae-4098-b2dd-12b291d82b50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.927957] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba34b6fd-4a66-4f77-8551-22558f3263be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.932139] env[62405]: DEBUG oslo_vmware.api [None 
req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1926.932139] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbbdca-2ebf-47e0-bcc6-cc3b2cc96a5a" [ 1926.932139] env[62405]: _type = "Task" [ 1926.932139] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.936949] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1926.936949] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1926.941125] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2cdee8c-a121-458d-9b63-281f2c457bf7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.948044] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbbdca-2ebf-47e0-bcc6-cc3b2cc96a5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.952508] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1926.952508] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cfc5cd-cf2a-99e4-9348-c600686259c6" [ 1926.952508] env[62405]: _type = "Task" [ 1926.952508] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.959509] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cfc5cd-cf2a-99e4-9348-c600686259c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.964533] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Releasing lock "refresh_cache-f269844b-a9b4-40a2-8ba4-a62ee59b4e40" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.964802] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1926.965018] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.965241] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.965408] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.965579] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] No waiting events found dispatching network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1926.965747] env[62405]: WARNING nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received unexpected event network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba for instance with vm_state building and task_state spawning. [ 1926.965912] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1926.966083] env[62405]: DEBUG nova.compute.manager [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing instance network info cache due to event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1926.966273] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.966412] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.966586] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1926.978146] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb14c745-7f83-45ba-b522-b0b8caa0f3a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.985623] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8eda69-542f-4e73-97ac-854668bd21c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.019971] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef777e74-8cb7-4513-8764-7bc43c100ba4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.025558] env[62405]: DEBUG oslo_vmware.api [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947850, 'name': ReconfigVM_Task, 'duration_secs': 0.145355} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.026283] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401540', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'name': 'volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd186b2f4-3fd1-44be-b8a4-080972aff3a0', 'attached_at': '', 'detached_at': '', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'serial': '3e3abe30-bd3e-4a0a-a97d-e8583b031955'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1927.030752] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b4b9a6-6bd9-441a-ae93-bfc850dcedc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.046134] env[62405]: DEBUG nova.objects.base [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Object Instance<81aebf11-5d80-4a86-b232-3ecc5f3892c2> lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1927.046739] env[62405]: DEBUG nova.compute.provider_tree [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.048814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5d7c75-4e30-4911-b95a-4faf12864f8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.068556] env[62405]: DEBUG nova.network.neutron [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [{"id": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "address": "fa:16:3e:e9:00:52", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap744277fe-5a", 
"ovs_interfaceid": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.069616] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808e43d6-5972-4cbf-83ac-4cd2d73118ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.075409] env[62405]: DEBUG oslo_vmware.api [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1927.075409] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5220c543-fed8-f464-14f1-aae5506a8ed8" [ 1927.075409] env[62405]: _type = "Task" [ 1927.075409] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.083867] env[62405]: DEBUG oslo_vmware.api [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5220c543-fed8-f464-14f1-aae5506a8ed8, 'name': SearchDatastore_Task, 'duration_secs': 0.006235} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.084143] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.403058] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947851, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.417021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.441817] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cbbdca-2ebf-47e0-bcc6-cc3b2cc96a5a, 'name': SearchDatastore_Task, 'duration_secs': 0.015467} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.442149] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.442385] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1927.442597] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.461223] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cfc5cd-cf2a-99e4-9348-c600686259c6, 'name': SearchDatastore_Task, 'duration_secs': 0.017876} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.462053] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c556a1c-8e43-4104-bdf1-3d23a929dc19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.467271] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1927.467271] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d6f63-4e56-41ed-9dcb-5c0924686ec7" [ 1927.467271] env[62405]: _type = "Task" [ 1927.467271] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.476747] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d6f63-4e56-41ed-9dcb-5c0924686ec7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.555343] env[62405]: DEBUG nova.scheduler.client.report [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1927.573335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.573657] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Instance network_info: |[{"id": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "address": "fa:16:3e:e9:00:52", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap744277fe-5a", "ovs_interfaceid": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1927.574095] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:00:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '744277fe-5ae4-47a1-8b6e-f92b066ed2a3', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1927.584482] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating folder: Project (7541d8c77a3f434094bc30a4d402bfcb). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1927.585637] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7dfc5eb6-2835-4821-b53b-5d0accacb452 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.597565] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created folder: Project (7541d8c77a3f434094bc30a4d402bfcb) in parent group-v401284. [ 1927.597775] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating folder: Instances. Parent ref: group-v401545. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1927.600840] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-670a058a-aaaf-4457-bbda-79d514294370 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.604778] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Received event network-vif-plugged-744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1927.604988] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquiring lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.605210] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.605381] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.605552] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] No waiting events found dispatching network-vif-plugged-744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1927.605722] env[62405]: WARNING nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e 
req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Received unexpected event network-vif-plugged-744277fe-5ae4-47a1-8b6e-f92b066ed2a3 for instance with vm_state building and task_state spawning. [ 1927.605882] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Received event network-changed-744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1927.606056] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Refreshing instance network info cache due to event network-changed-744277fe-5ae4-47a1-8b6e-f92b066ed2a3. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1927.606249] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquiring lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.606403] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquired lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.606557] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Refreshing network info cache for port 744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1927.616974] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created folder: Instances in parent group-v401545. [ 1927.617244] env[62405]: DEBUG oslo.service.loopingcall [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1927.617431] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1927.619665] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b55bd5c-e57a-4b8c-aaec-33a7236deb92 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.639808] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1927.639808] env[62405]: value = "task-1947854" [ 1927.639808] env[62405]: _type = "Task" [ 1927.639808] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.647108] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947854, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.700164] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1927.735324] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1927.735627] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1927.735789] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1927.735973] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1927.736138] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1927.736366] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1927.736746] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 
tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1927.737036] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1927.737273] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1927.737487] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1927.737745] env[62405]: DEBUG nova.virt.hardware [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1927.739015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44463f08-642e-4044-bf71-7a5ae01af6de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.754646] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158cb176-b48d-4a5b-8836-713e34b7d37e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.866552] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updated VIF entry in instance network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1927.866926] env[62405]: DEBUG nova.network.neutron [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.903854] env[62405]: DEBUG oslo_vmware.api [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947851, 'name': PowerOnVM_Task, 'duration_secs': 0.54741} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.904136] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1927.904343] env[62405]: INFO nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Took 15.15 seconds to spawn the instance on the hypervisor. 
[ 1927.904530] env[62405]: DEBUG nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1927.905329] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3455a399-0d10-42dd-85cd-5888c47e459c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.978673] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524d6f63-4e56-41ed-9dcb-5c0924686ec7, 'name': SearchDatastore_Task, 'duration_secs': 0.008901} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.978964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.979276] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f269844b-a9b4-40a2-8ba4-a62ee59b4e40/f269844b-a9b4-40a2-8ba4-a62ee59b4e40.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1927.979563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.979746] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.979954] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a154a37-1ace-445b-a8da-56796314f231 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.982243] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd957d4e-79c3-4904-89a4-be189fac6807 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.993726] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 
tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.993920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1927.994754] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1927.994754] env[62405]: value = "task-1947855" [ 1927.994754] env[62405]: _type = "Task" [ 1927.994754] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.994950] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1a23fb-1de6-4b15-8456-c384aeee3dbd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.007542] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1928.007542] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a05beb-f1c7-8448-676a-31c31ab663b7" [ 1928.007542] env[62405]: _type = "Task" [ 1928.007542] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.011418] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947855, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.021173] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a05beb-f1c7-8448-676a-31c31ab663b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011017} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.021911] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da5ae90a-2ac0-4e8f-acf4-d5fdb03633be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.027094] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1928.027094] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52afb0e5-201b-86a0-e564-e2a0d745e46f" [ 1928.027094] env[62405]: _type = "Task" [ 1928.027094] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.035250] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52afb0e5-201b-86a0-e564-e2a0d745e46f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.064088] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.064673] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1928.069983] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.245s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.073853] env[62405]: DEBUG nova.objects.instance [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lazy-loading 'resources' on Instance uuid 06dbb3e0-876e-4290-81f5-6f95f9d5cb37 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.075724] env[62405]: DEBUG nova.objects.instance [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.151765] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947854, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.287918] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Successfully updated port: b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1928.370201] env[62405]: DEBUG oslo_concurrency.lockutils [req-d9c89620-94ce-4b6a-a9d0-b879bf6831a6 req-c69cfaf7-79f8-4d97-997b-e89d65c14bff service nova] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.428212] env[62405]: INFO nova.compute.manager [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Took 49.73 seconds to build instance. [ 1928.446499] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updated VIF entry in instance network info cache for port 744277fe-5ae4-47a1-8b6e-f92b066ed2a3. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1928.447274] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [{"id": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "address": "fa:16:3e:e9:00:52", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap744277fe-5a", "ovs_interfaceid": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.513017] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947855, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.540565] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52afb0e5-201b-86a0-e564-e2a0d745e46f, 'name': SearchDatastore_Task, 'duration_secs': 0.00923} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.540847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.541164] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1928.541471] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eafd7030-a047-4784-99fb-ba81a140c8a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.550606] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1928.550606] env[62405]: value = "task-1947856" [ 1928.550606] env[62405]: _type = "Task" [ 1928.550606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.563474] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.585647] env[62405]: DEBUG nova.compute.utils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.589249] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1928.589769] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1928.592678] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a2d98da2-3e7e-4018-9544-889fcc0404ac tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.847s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.653302] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947854, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.701425] env[62405]: DEBUG nova.policy [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1928.791344] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.791579] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.791858] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1928.928477] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97d98cee-68a0-4703-b5af-d896b3f103cc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.247s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.929517] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a78c28c-a6e4-4179-a065-7bafdf0806b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.937925] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385edf46-6b68-4f2f-b812-8d550a92adab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.981187] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Releasing lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.981464] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-vif-unplugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1928.981661] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.981877] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.982059] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.982235] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] No waiting events found dispatching network-vif-unplugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1928.982454] env[62405]: WARNING nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received unexpected event network-vif-unplugged-dba92750-bf41-4683-b71d-128391ff29d0 for instance with vm_state shelved_offloaded and task_state None. 
[ 1928.982593] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-changed-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1928.986020] env[62405]: DEBUG nova.compute.manager [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing instance network info cache due to event network-changed-dba92750-bf41-4683-b71d-128391ff29d0. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1928.986020] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.986020] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.986020] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing network info cache for port dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1928.988741] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c5298a-3525-49b9-abb6-b0f28afc28ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.000613] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1171cf56-a4e1-491c-b97c-b494d903c9b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.018955] env[62405]: DEBUG nova.compute.provider_tree [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1929.023630] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662844} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.024184] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f269844b-a9b4-40a2-8ba4-a62ee59b4e40/f269844b-a9b4-40a2-8ba4-a62ee59b4e40.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1929.024389] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1929.024565] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db3b4f39-9b8d-4a0d-822d-4e9b1249f60d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.032208] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1929.032208] env[62405]: value = "task-1947857" [ 1929.032208] env[62405]: _type = "Task" [ 1929.032208] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.040447] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947857, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.060637] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.093044] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1929.152283] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947854, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.331315] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1929.436455] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Successfully created port: d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1929.448057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.448057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.448057] env[62405]: DEBUG nova.compute.manager [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1929.448057] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f640078-d51f-4c33-9593-ff8a13fbe770 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.456803] env[62405]: DEBUG nova.compute.manager [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1929.457493] env[62405]: DEBUG nova.objects.instance [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1929.494248] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.525164] env[62405]: DEBUG nova.scheduler.client.report [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1929.542544] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.210752} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.542808] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1929.543736] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae016bc-2255-432c-9d41-3b5f821dcf7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.568934] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] f269844b-a9b4-40a2-8ba4-a62ee59b4e40/f269844b-a9b4-40a2-8ba4-a62ee59b4e40.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1929.575386] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f748c10c-49b5-4a1d-a169-55152098d01c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.600785] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.604901] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1929.604901] env[62405]: value = "task-1947858" [ 1929.604901] env[62405]: _type = "Task" [ 1929.604901] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.614501] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947858, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.649490] env[62405]: DEBUG nova.network.neutron [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updating instance_info_cache with network_info: [{"id": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "address": "fa:16:3e:77:ff:03", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb23a9aa4-c4", "ovs_interfaceid": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.656988] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947854, 'name': CreateVM_Task, 'duration_secs': 1.600881} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.657591] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1929.658695] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.658971] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.659609] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1929.659963] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4dc0588-24d4-4ed2-9cd1-1f6f08fed791 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.667287] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1929.667287] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b95bad-8a13-86c9-f502-68bdc30644c9" [ 1929.667287] env[62405]: _type = "Task" [ 1929.667287] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.678661] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b95bad-8a13-86c9-f502-68bdc30644c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.797695] env[62405]: DEBUG nova.compute.manager [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Received event network-vif-plugged-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1929.797979] env[62405]: DEBUG oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Acquiring lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.799196] env[62405]: DEBUG oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.799196] env[62405]: DEBUG oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.799196] env[62405]: DEBUG nova.compute.manager [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] No waiting events found dispatching network-vif-plugged-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1929.799196] env[62405]: WARNING nova.compute.manager [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Received unexpected event network-vif-plugged-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 for instance with vm_state building and task_state spawning. [ 1929.799196] env[62405]: DEBUG nova.compute.manager [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Received event network-changed-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1929.799396] env[62405]: DEBUG nova.compute.manager [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Refreshing instance network info cache due to event network-changed-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1929.799396] env[62405]: DEBUG oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Acquiring lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.964943] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updated VIF entry in instance network info cache for port dba92750-bf41-4683-b71d-128391ff29d0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1929.965570] env[62405]: DEBUG nova.network.neutron [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": null, "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapdba92750-bf", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.030958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.033683] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.276s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.035772] env[62405]: INFO nova.compute.claims [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1930.070962] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.105506] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1930.116869] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947858, 'name': ReconfigVM_Task, 'duration_secs': 0.415747} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.117486] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Reconfigured VM instance instance-0000005e to attach disk [datastore1] f269844b-a9b4-40a2-8ba4-a62ee59b4e40/f269844b-a9b4-40a2-8ba4-a62ee59b4e40.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1930.117900] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52a35d73-fb1e-48b7-ba8e-3603118e73a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.124453] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1930.124453] env[62405]: value = "task-1947859" [ 1930.124453] env[62405]: _type = "Task" [ 1930.124453] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.134309] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947859, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.137098] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1930.137333] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1930.137489] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1930.137670] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1930.137816] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1930.137961] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1930.138215] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1930.138384] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1930.138554] 
env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1930.139018] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1930.139018] env[62405]: DEBUG nova.virt.hardware [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1930.139705] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5b5eab-b2df-461b-8cd7-facf04a6fe82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.147768] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85398af0-3a94-4a04-ad19-90958b387188 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.163831] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.164549] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Instance network_info: |[{"id": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "address": "fa:16:3e:77:ff:03", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb23a9aa4-c4", "ovs_interfaceid": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1930.164685] env[62405]: DEBUG 
oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Acquired lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.164870] env[62405]: DEBUG nova.network.neutron [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Refreshing network info cache for port b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1930.166205] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:ff:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1930.173744] env[62405]: DEBUG oslo.service.loopingcall [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1930.174777] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1930.177861] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1482f03-923b-4b7c-b223-a980bbfc0ab2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.198765] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b95bad-8a13-86c9-f502-68bdc30644c9, 'name': SearchDatastore_Task, 'duration_secs': 0.021475} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.200216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.200477] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1930.200714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.200862] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.201065] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1930.201318] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1930.201318] env[62405]: value = "task-1947860" [ 1930.201318] env[62405]: _type = "Task" [ 1930.201318] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.201503] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de0a405c-040d-4368-b552-9c8d93c6f3ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.214184] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947860, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.215650] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1930.215850] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1930.216533] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35f1c2a4-11c8-4593-a0a2-822bd6e76b0b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.221759] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1930.221759] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52686a68-8824-6c54-95c8-e1adea9b7d2f" [ 1930.221759] env[62405]: _type = "Task" [ 1930.221759] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.231245] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52686a68-8824-6c54-95c8-e1adea9b7d2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.464838] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1930.465171] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3f00e96-c22f-4632-b76b-1cb376c7260e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.472606] env[62405]: DEBUG oslo_vmware.api [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1930.472606] env[62405]: value = "task-1947861" [ 1930.472606] env[62405]: _type = "Task" [ 1930.472606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.480145] env[62405]: DEBUG oslo_vmware.api [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947861, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.572082] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.637031] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947859, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.717543] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947860, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.732162] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52686a68-8824-6c54-95c8-e1adea9b7d2f, 'name': SearchDatastore_Task, 'duration_secs': 0.01039} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.732953] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08ae7b14-3b4f-4401-86b2-d541e7b2a951 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.738746] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1930.738746] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a42754-5e92-ea04-d42a-e5691199492c" [ 1930.738746] env[62405]: _type = "Task" [ 1930.738746] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.747280] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a42754-5e92-ea04-d42a-e5691199492c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.895227] env[62405]: DEBUG nova.network.neutron [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updated VIF entry in instance network info cache for port b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1930.895650] env[62405]: DEBUG nova.network.neutron [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updating instance_info_cache with network_info: [{"id": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "address": "fa:16:3e:77:ff:03", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb23a9aa4-c4", "ovs_interfaceid": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.984340] env[62405]: DEBUG oslo_vmware.api [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947861, 'name': PowerOffVM_Task, 'duration_secs': 0.405557} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.984896] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1930.985178] env[62405]: DEBUG nova.compute.manager [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1930.987398] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e437461-1ff2-46df-9c76-c3a2f449f0eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.072017] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947856, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.079254} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.072017] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1931.072017] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1931.072232] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-902c9bc2-b2f8-44a2-8ec5-508efa85932e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.078512] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1931.078512] env[62405]: value = "task-1947862" [ 1931.078512] env[62405]: _type = "Task" [ 1931.078512] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.086621] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.135365] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947859, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.214265] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947860, 'name': CreateVM_Task, 'duration_secs': 0.870544} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.214443] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1931.215165] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.215311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.215635] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1931.215889] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28a3f743-1f80-4cc2-9615-17d3c9550fc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.220598] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1931.220598] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b24d2-7e0d-4c74-7261-5982f879dcfb" [ 1931.220598] env[62405]: _type = "Task" [ 1931.220598] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.228226] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b24d2-7e0d-4c74-7261-5982f879dcfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.248021] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a42754-5e92-ea04-d42a-e5691199492c, 'name': SearchDatastore_Task, 'duration_secs': 0.013465} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.248202] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.248466] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/a91a6d04-2ec0-4568-bdb3-732d148644de.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1931.248716] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b68417e-5421-4596-8065-1fe2fa260be1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.255381] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1931.255381] env[62405]: value = "task-1947863" [ 1931.255381] env[62405]: _type = "Task" [ 1931.255381] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.264103] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.399020] env[62405]: DEBUG oslo_concurrency.lockutils [req-537a7fed-fe1c-4e15-9070-efd31760cf1b req-fe67274a-e231-45c3-b5e6-e060f7a04938 service nova] Releasing lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.588195] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065765} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.588526] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1931.589366] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e450f3-d2c5-448f-af84-43df497c3104 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.610347] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1931.610603] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb90666c-d744-452d-b8ad-a8992b5ba45c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.630261] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1931.630261] env[62405]: value = "task-1947864" [ 1931.630261] env[62405]: _type = "Task" [ 1931.630261] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.636699] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947859, 'name': Rename_Task, 'duration_secs': 1.173221} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.636910] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1931.637186] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2067f47-14be-44fc-987f-dbc6adf7edda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.641518] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947864, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.647031] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1931.647031] env[62405]: value = "task-1947865" [ 1931.647031] env[62405]: _type = "Task" [ 1931.647031] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.652875] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.738734] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b24d2-7e0d-4c74-7261-5982f879dcfb, 'name': SearchDatastore_Task, 'duration_secs': 0.027362} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.739090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.739362] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1931.739703] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.739799] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.739960] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1931.740286] env[62405]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55d06eb7-d38d-4d37-a61f-0dd43cb71bc8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.764021] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.827215] env[62405]: DEBUG nova.compute.manager [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Received event network-changed-a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1931.827482] env[62405]: DEBUG nova.compute.manager [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Refreshing instance network info cache due to event network-changed-a7c7d269-027f-42d9-819a-e04ab445d816. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1931.827803] env[62405]: DEBUG oslo_concurrency.lockutils [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.827983] env[62405]: DEBUG oslo_concurrency.lockutils [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.828215] env[62405]: DEBUG nova.network.neutron [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Refreshing network info cache for port a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1932.142321] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947864, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.154651] env[62405]: DEBUG oslo_vmware.api [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947865, 'name': PowerOnVM_Task, 'duration_secs': 0.453569} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.154950] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1932.155192] env[62405]: INFO nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Took 9.46 seconds to spawn the instance on the hypervisor. [ 1932.155385] env[62405]: DEBUG nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1932.156284] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f95cf9b-a665-4767-819c-1f4155a66ebc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.265257] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.280652] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.280821] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1932.281585] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51f45a9a-b52f-4c10-9e24-bc6638bf5c3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.288071] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1932.288071] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d19d62-f3cf-e3c7-cb06-2462d025572f" [ 1932.288071] env[62405]: _type = "Task" [ 1932.288071] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.295855] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d19d62-f3cf-e3c7-cb06-2462d025572f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.534576] env[62405]: DEBUG nova.network.neutron [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updated VIF entry in instance network info cache for port a7c7d269-027f-42d9-819a-e04ab445d816. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1932.534946] env[62405]: DEBUG nova.network.neutron [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.642310] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947864, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.765520] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.796639] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d19d62-f3cf-e3c7-cb06-2462d025572f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.141535] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947864, 'name': ReconfigVM_Task, 'duration_secs': 1.413006} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.141824] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.142467] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd3c8c39-f3d1-4ee3-8c64-c6c35f73783a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.149373] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1933.149373] env[62405]: value = "task-1947866" [ 1933.149373] env[62405]: _type = "Task" [ 1933.149373] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.162502] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947866, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.266579] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.308991] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d19d62-f3cf-e3c7-cb06-2462d025572f, 'name': SearchDatastore_Task, 'duration_secs': 0.880901} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.309273] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77921e10-0316-45a8-8177-41c5c58efd15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.315584] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1933.315584] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528044d6-45d2-ff3e-bbff-741150d52485" [ 1933.315584] env[62405]: _type = "Task" [ 1933.315584] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.329257] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528044d6-45d2-ff3e-bbff-741150d52485, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.339819] env[62405]: INFO nova.scheduler.client.report [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted allocations for instance 06dbb3e0-876e-4290-81f5-6f95f9d5cb37 [ 1933.472379] env[62405]: DEBUG oslo_concurrency.lockutils [req-c2ff9f6c-033f-4df5-b127-a44c9b5f008e req-0a64a9fb-7785-406e-923d-a4f9a5ebb210 service nova] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.502264] env[62405]: DEBUG oslo_concurrency.lockutils [None req-27ffe6b6-6c3c-447c-88bf-55f741b0594f tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 4.056s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.537894] env[62405]: DEBUG oslo_concurrency.lockutils [req-47b1c050-849c-4231-b1a4-f9fdb8e84230 req-ea060228-5525-47b1-ae1b-0a491342dbaa service nova] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.658830] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947866, 'name': Rename_Task, 'duration_secs': 0.157951} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.659248] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1933.659726] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f2fbc72-97f7-47ef-8fca-8a4c2d4cfcc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.665397] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1933.665397] env[62405]: value = "task-1947867" [ 1933.665397] env[62405]: _type = "Task" [ 1933.665397] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.681093] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.681524] env[62405]: INFO nova.compute.manager [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Took 21.44 seconds to build instance. 
[ 1933.742819] env[62405]: DEBUG nova.compute.manager [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-vif-plugged-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1933.743050] env[62405]: DEBUG oslo_concurrency.lockutils [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.743273] env[62405]: DEBUG oslo_concurrency.lockutils [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.743442] env[62405]: DEBUG oslo_concurrency.lockutils [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.743607] env[62405]: DEBUG nova.compute.manager [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] No waiting events found dispatching network-vif-plugged-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1933.743771] env[62405]: WARNING nova.compute.manager [req-b56bfebf-c2d2-45c4-a6cd-6a1287f4c57c req-33312b70-fcc6-4ddf-83ac-cd571fc6ff40 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received unexpected event network-vif-plugged-d440b728-2371-4e75-bb9f-2330f0318cae for instance with vm_state building and task_state spawning. [ 1933.773106] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947863, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.349318} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.773451] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/a91a6d04-2ec0-4568-bdb3-732d148644de.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1933.773698] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1933.773981] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc3f3252-730b-40d2-a579-6c51abca8eaa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.782741] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1933.782741] env[62405]: value = "task-1947868" [ 1933.782741] env[62405]: _type = "Task" [ 1933.782741] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.792328] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947868, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.796991] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Successfully updated port: d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1933.825942] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528044d6-45d2-ff3e-bbff-741150d52485, 'name': SearchDatastore_Task, 'duration_secs': 0.057829} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.826424] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.826692] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/0d2b305d-d754-413c-afdf-3a2e8f143891.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1933.826944] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2902c5a-693d-43f5-897d-c13ffd879d9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.834120] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1933.834120] env[62405]: value = "task-1947869" [ 1933.834120] env[62405]: _type = "Task" [ 1933.834120] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.841594] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947869, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.850812] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8a887fc5-23f0-42a6-9c1b-69539adbf33e tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "06dbb3e0-876e-4290-81f5-6f95f9d5cb37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.783s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.178050] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.186467] env[62405]: DEBUG oslo_concurrency.lockutils [None req-76c716bc-47f5-4c3f-a679-038147f962c3 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.965s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.299500] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.299730] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.299916] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.304112] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947868, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.350457] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947869, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.371301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516580a3-73ec-4998-b232-e1416929b746 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.379435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a654503a-67f9-4e68-9059-91232a7d1fe8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.411904] env[62405]: DEBUG nova.objects.instance [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.413926] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85959138-fe85-4b45-83fd-b5c6750278bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.422635] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7cf763-3c27-4298-b830-ea287db962b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.438195] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1934.676635] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.760868] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.761408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.796177] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.600467} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.796425] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1934.797214] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78041d24-d75e-47bc-a967-71ddae2b55d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.822025] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/a91a6d04-2ec0-4568-bdb3-732d148644de.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1934.822357] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9a66a9a-9dbf-4280-a6dd-e76ce5bcc799 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.847543] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534341} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.848893] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/0d2b305d-d754-413c-afdf-3a2e8f143891.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1934.849179] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1934.849483] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1934.849483] env[62405]: value = "task-1947870" [ 1934.849483] env[62405]: _type = "Task" [ 1934.849483] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.849710] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db3e4fc8-d45f-4e1f-b58d-8befebbfe66f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.860708] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947870, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.861974] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1934.861974] env[62405]: value = "task-1947871" [ 1934.861974] env[62405]: _type = "Task" [ 1934.861974] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.869844] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.870716] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1934.921110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.921326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.921531] env[62405]: DEBUG nova.network.neutron [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1934.921754] env[62405]: DEBUG nova.objects.instance [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'info_cache' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.967280] env[62405]: ERROR nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [req-24f39a32-9d66-43b2-801f-9fda5a5f3908] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-24f39a32-9d66-43b2-801f-9fda5a5f3908"}]} [ 1934.982938] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1934.994950] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1934.995237] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.006627] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1935.023229] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1935.044951] env[62405]: DEBUG nova.network.neutron [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": 
"br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.178655] env[62405]: DEBUG oslo_vmware.api [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947867, 'name': PowerOnVM_Task, 'duration_secs': 1.411129} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.181325] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1935.181588] env[62405]: INFO nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Took 19.00 seconds to spawn the instance on the hypervisor. [ 1935.181793] env[62405]: DEBUG nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1935.184096] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3fa338-8058-487a-be5b-8e317b7e2414 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.263487] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1935.305501] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b28e630-27b4-4ca8-b258-f4bd7dc828c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.313296] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11175188-cb6c-4b30-8c52-129a52377a5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.343626] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27aa549-4468-4a90-b76f-f4b148117e82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.351322] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c72544-2fa4-4430-91bf-8d9c93aa3f47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.363201] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947870, 'name': ReconfigVM_Task, 'duration_secs': 0.446613} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.373941] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfigured VM instance instance-0000005f to attach disk [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/a91a6d04-2ec0-4568-bdb3-732d148644de.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1935.374838] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.376664] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af15b07c-8911-47ed-b1e4-fad4004d4931 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.384137] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07912} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.385474] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1935.385829] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1935.385829] env[62405]: value = "task-1947872" [ 1935.385829] env[62405]: _type = "Task" [ 1935.385829] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.387086] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25fc969-a38c-4889-b3a5-eaf170084c23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.410554] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947872, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.419404] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/0d2b305d-d754-413c-afdf-3a2e8f143891.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1935.420118] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bbb0d19-539a-4baf-9c59-c84790e94e83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.435588] env[62405]: DEBUG nova.objects.base [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1935.442751] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1935.442751] env[62405]: value = "task-1947873" [ 1935.442751] env[62405]: _type = "Task" [ 1935.442751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.451923] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947873, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.514688] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.514973] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.547645] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.548931] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Instance network_info: |[{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1935.548931] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:15:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd440b728-2371-4e75-bb9f-2330f0318cae', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1935.563459] env[62405]: DEBUG oslo.service.loopingcall [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1935.563913] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1935.564321] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-439c1d99-adfe-4604-9224-f415fb43d756 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.590821] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1935.590821] env[62405]: value = "task-1947874" [ 1935.590821] env[62405]: _type = "Task" [ 1935.590821] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.599430] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947874, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.702214] env[62405]: INFO nova.compute.manager [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Took 43.39 seconds to build instance. [ 1935.792936] env[62405]: DEBUG nova.compute.manager [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1935.793157] env[62405]: DEBUG nova.compute.manager [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1935.793377] env[62405]: DEBUG oslo_concurrency.lockutils [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.793526] env[62405]: DEBUG oslo_concurrency.lockutils [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.793690] env[62405]: DEBUG nova.network.neutron [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1935.799060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.902773] env[62405]: ERROR nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [req-7a64c562-3bff-4686-8441-b45dfc9b51a5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7a64c562-3bff-4686-8441-b45dfc9b51a5"}]} [ 1935.914518] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947872, 'name': Rename_Task, 'duration_secs': 0.336385} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.914809] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1935.915075] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af5aa754-16a1-48b7-98e8-58381e715efe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.921735] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1935.921735] env[62405]: value = "task-1947875" [ 1935.921735] env[62405]: _type = "Task" [ 1935.921735] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.926021] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1935.931525] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947875, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.943257] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1935.943526] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.956042] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947873, 'name': ReconfigVM_Task, 'duration_secs': 0.284279} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.957155] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1935.959265] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/0d2b305d-d754-413c-afdf-3a2e8f143891.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1935.960090] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b72fc03-bce3-4f7f-baac-5ad163c337e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.966802] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1935.966802] env[62405]: value = "task-1947876" [ 1935.966802] env[62405]: _type = "Task" [ 1935.966802] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.974808] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947876, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.977585] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1936.017239] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1936.108166] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947874, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.206695] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b848031f-d987-4d9e-8a04-bdf19bd22cd4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.901s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.261617] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6f362e-a397-4374-84c9-4448254e01eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.269320] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81ddbe4-e36a-4d3e-b711-0d42dcd63a33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.298206] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16900e80-5ffe-44c5-aed9-e950ba07399e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.307456] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f9ff8f-dbd7-4764-8c00-4439d3193e63 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.321541] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1936.429244] env[62405]: DEBUG nova.network.neutron [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.436031] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947875, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.480021] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947876, 'name': Rename_Task, 'duration_secs': 0.154246} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.480845] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1936.481118] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71c457d6-3aee-4418-b94a-acb06189e639 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.487312] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1936.487312] env[62405]: value = "task-1947877" [ 1936.487312] env[62405]: _type = "Task" [ 1936.487312] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.497691] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947877, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.523140] env[62405]: DEBUG nova.network.neutron [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1936.523140] env[62405]: DEBUG nova.network.neutron [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.545658] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.603085] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947874, 'name': CreateVM_Task, 'duration_secs': 0.674213} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.608092] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1936.608092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.608092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.608092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1936.608092] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d8f5325-7c40-4040-82b4-98a77d98b8e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.612179] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1936.612179] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d3daa-61da-acc0-09d2-3315ef364222" [ 1936.612179] env[62405]: _type = "Task" [ 1936.612179] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.620975] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d3daa-61da-acc0-09d2-3315ef364222, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.933971] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.935366] env[62405]: DEBUG oslo_vmware.api [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947875, 'name': PowerOnVM_Task, 'duration_secs': 0.999542} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.935799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1936.936013] env[62405]: INFO nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Took 11.76 seconds to spawn the instance on the hypervisor. [ 1936.936196] env[62405]: DEBUG nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1936.936943] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a9d7bd-561c-4d22-a86f-ef9a41b2f23d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.997388] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947877, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.027895] env[62405]: DEBUG oslo_concurrency.lockutils [req-feb016f7-aca7-4e6b-b744-846a7b86ca08 req-0353488b-1e09-4f19-86f8-e208af277061 service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.123060] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d3daa-61da-acc0-09d2-3315ef364222, 'name': SearchDatastore_Task, 'duration_secs': 0.011687} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.123402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.123766] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1937.123882] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.124023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.124211] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1937.124480] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80c1f3b1-bff0-429a-9e97-0a0b512c1e8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.139579] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1937.139749] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1937.140482] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c329675-45b5-4b65-a799-0a6deb125a88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.146343] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1937.146343] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c774e6-b04f-2739-0819-2c997cf20e18" [ 1937.146343] env[62405]: _type = "Task" [ 1937.146343] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.156060] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c774e6-b04f-2739-0819-2c997cf20e18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.498949] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947877, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.658413] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c774e6-b04f-2739-0819-2c997cf20e18, 'name': SearchDatastore_Task, 'duration_secs': 0.011322} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.659195] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8cc37a-3073-45fc-9962-15361bbcbf4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.664603] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1937.664603] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1e8ee-98ea-0c33-8b1f-5da8a22f1105" [ 1937.664603] env[62405]: _type = "Task" [ 1937.664603] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.672355] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1e8ee-98ea-0c33-8b1f-5da8a22f1105, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.880839] env[62405]: DEBUG nova.compute.manager [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1937.880945] env[62405]: DEBUG nova.compute.manager [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing instance network info cache due to event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1937.881195] env[62405]: DEBUG oslo_concurrency.lockutils [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.881324] env[62405]: DEBUG oslo_concurrency.lockutils [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.881476] env[62405]: DEBUG nova.network.neutron [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1937.940090] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1937.940207] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad090c70-18dd-496b-b0be-ec6e307df880 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.947851] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1937.947851] env[62405]: value = "task-1947878" [ 1937.947851] env[62405]: _type = "Task" [ 1937.947851] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.958094] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.998800] env[62405]: DEBUG oslo_vmware.api [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947877, 'name': PowerOnVM_Task, 'duration_secs': 1.05386} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.998800] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1937.998961] env[62405]: INFO nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1937.999123] env[62405]: DEBUG nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1937.999987] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccc7fa5-948a-41fe-943d-0ffd3816ca69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.176156] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a1e8ee-98ea-0c33-8b1f-5da8a22f1105, 'name': SearchDatastore_Task, 'duration_secs': 0.009684} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.176310] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.176969] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 556e1bca-f2f1-4200-96df-997d48ce5a15/556e1bca-f2f1-4200-96df-997d48ce5a15.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1938.176969] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e77e766-5550-45c9-aca6-bfdac4ea6055 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.183609] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1938.183609] env[62405]: value = "task-1947879" [ 1938.183609] env[62405]: _type = "Task" [ 1938.183609] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.191012] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.460562] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.610284] env[62405]: DEBUG nova.network.neutron [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updated VIF entry in instance network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1938.610679] env[62405]: DEBUG nova.network.neutron [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.693156] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.958935] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.193912] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.459452] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.695174] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.960663] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.087608] env[62405]: DEBUG nova.scheduler.client.report [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 150 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1940.087608] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 150 to 151 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1940.087608] env[62405]: DEBUG nova.compute.provider_tree [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1940.114125] env[62405]: DEBUG oslo_concurrency.lockutils [req-7eec0e22-e4ba-46bc-9c88-e47677f6a736 req-09486777-712c-4f70-9800-15cd642e1961 service nova] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.195567] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.458884] env[62405]: INFO nova.compute.manager [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Took 27.94 seconds to build instance. 
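The placement exchange traced above (the 409 "placement.concurrent_update" at 1935.902773, the inventory/aggregate/trait refresh that follows, and the successful write at 1940.087608 that moves the provider generation from 150 to 151) is Placement's optimistic concurrency control: every inventory write carries the resource provider generation the writer last saw, and a stale generation forces a refresh-and-retry. A minimal illustrative sketch of that retry loop, assuming a plain requests client with a hypothetical placement endpoint and token rather than Nova's actual SchedulerReportClient:

    import requests

    # Illustrative sketch only -- not Nova's report client. PUT the inventory
    # together with the provider generation we last read; a 409
    # "placement.concurrent_update" means another writer bumped the generation,
    # so re-read the provider and try again.
    PLACEMENT = "http://placement.example/placement"    # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "<token>",               # hypothetical credentials
               "OpenStack-API-Version": "placement 1.28"}

    def set_inventory_with_retry(rp_uuid, inventories, attempts=4):
        for _ in range(attempts):
            rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                              headers=HEADERS).json()
            body = {"resource_provider_generation": rp["generation"],
                    "inventories": inventories}
            resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                                headers=HEADERS, json=body)
            if resp.status_code == 200:
                return resp.json()["resource_provider_generation"]
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409 concurrent update: loop and pick up the new generation.
        raise RuntimeError(f"inventory update kept conflicting for {rp_uuid}")

In the log this plays out as: the first PUT fails with the conflict, _refresh_and_get_inventory re-reads the provider, and the subsequent set_inventory_for_provider succeeds, after which the ProviderTree records the new generation (151).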
[ 1940.463910] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.525499] env[62405]: INFO nova.compute.manager [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Took 27.40 seconds to build instance. [ 1940.592716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 10.559s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.593338] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1940.595812] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.376s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.597282] env[62405]: INFO nova.compute.claims [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.695609] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.964321] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.964772] env[62405]: DEBUG oslo_concurrency.lockutils [None req-55a4b259-877e-4cf3-8d7e-b03d5e13a0c9 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.451s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.024701] env[62405]: INFO nova.compute.manager [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Rescuing [ 1941.025098] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.025313] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.025537] env[62405]: DEBUG nova.network.neutron [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1941.027148] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45e6b86f-dd97-4d47-9c37-b7a405dd4486 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.910s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.102056] env[62405]: DEBUG nova.compute.utils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1941.105631] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1941.105795] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1941.149956] env[62405]: DEBUG nova.policy [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f866535fb94dd0b0ddddddd7da60b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41626e27199f4370a2554bb243a72d41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1941.200507] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.431937] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Successfully created port: 1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1941.466433] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.612387] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1941.701023] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947879, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.342035} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.701023] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 556e1bca-f2f1-4200-96df-997d48ce5a15/556e1bca-f2f1-4200-96df-997d48ce5a15.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1941.701896] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1941.701896] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d93881e2-842f-42d8-b057-4101f6f246bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.709131] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1941.709131] env[62405]: value = "task-1947880" [ 1941.709131] env[62405]: _type = "Task" [ 1941.709131] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.719104] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947880, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.926810] env[62405]: DEBUG nova.network.neutron [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updating instance_info_cache with network_info: [{"id": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "address": "fa:16:3e:77:ff:03", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb23a9aa4-c4", "ovs_interfaceid": "b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.962247] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793f18e8-7ca8-420a-9e06-4fbb005c0ba0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.975018] env[62405]: DEBUG oslo_vmware.api [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947878, 'name': PowerOnVM_Task, 'duration_secs': 3.611506} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.975390] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1941.975605] env[62405]: DEBUG nova.compute.manager [None req-ff2d7eb1-9247-4c8a-9e35-458a29fe57b2 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1941.976736] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea8e1ad-8f09-4311-8cbe-537dfa9dcd1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.980352] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6f2b23-86c9-4d08-9cfd-d4ad343f720a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.014399] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ca992d-77f8-46bf-966d-f442e65a981f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.022878] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddebf0bd-2ea8-4ab5-92df-638bc7590275 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.036503] env[62405]: DEBUG nova.compute.provider_tree [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1942.220190] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947880, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.320881} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.220190] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1942.220579] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22398840-3497-4106-b8c2-f4cf7e112847 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.244909] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 556e1bca-f2f1-4200-96df-997d48ce5a15/556e1bca-f2f1-4200-96df-997d48ce5a15.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1942.246080] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-762aa4a8-a39a-42e9-bb93-119a163a8f2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.266266] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1942.266266] env[62405]: value = "task-1947881" [ 1942.266266] env[62405]: _type = "Task" [ 1942.266266] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.274570] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947881, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.431316] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-0d2b305d-d754-413c-afdf-3a2e8f143891" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.539211] env[62405]: DEBUG nova.scheduler.client.report [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1942.775742] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947881, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.988331] env[62405]: DEBUG nova.compute.manager [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Received event network-vif-plugged-1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1942.988571] env[62405]: DEBUG oslo_concurrency.lockutils [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.988781] env[62405]: DEBUG oslo_concurrency.lockutils [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.988970] env[62405]: DEBUG oslo_concurrency.lockutils [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.989160] env[62405]: DEBUG nova.compute.manager [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] No waiting events found dispatching network-vif-plugged-1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1942.989330] env[62405]: WARNING nova.compute.manager [req-1a523306-6223-4936-b99b-1cdcb42c2dc6 req-00c62258-bf9c-49dd-b974-4a00104d1941 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Received unexpected event network-vif-plugged-1336ca88-2020-4b2c-b082-e45e1fe68506 for instance with vm_state building and task_state spawning. [ 1943.040368] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Successfully updated port: 1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1943.047480] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.047480] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1943.052024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.015s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.052024] env[62405]: DEBUG nova.objects.instance [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid 3b9a6a82-a426-4802-9640-5b39e5e0ff49 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1943.127209] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1943.149031] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1943.149279] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1943.149511] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1943.149741] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1943.149899] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1943.150378] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1943.150644] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1943.150937] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1943.151137] env[62405]: DEBUG nova.virt.hardware [None 
req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1943.151311] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1943.151490] env[62405]: DEBUG nova.virt.hardware [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1943.152681] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61c6f15-eefc-4ac8-9fbc-f25dd558b0ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.160924] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ed9dc7-0cbc-4798-a4e0-c6ac4edd95d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.276513] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947881, 'name': ReconfigVM_Task, 'duration_secs': 0.977888} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.276751] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 556e1bca-f2f1-4200-96df-997d48ce5a15/556e1bca-f2f1-4200-96df-997d48ce5a15.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1943.277378] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4364ce11-87f1-4f5a-9e95-71e127feac67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.283524] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1943.283524] env[62405]: value = "task-1947882" [ 1943.283524] env[62405]: _type = "Task" [ 1943.283524] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.291489] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947882, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.547163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1943.547163] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1943.547252] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1943.552162] env[62405]: DEBUG nova.compute.utils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1943.553355] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1943.553524] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1943.597573] env[62405]: DEBUG nova.policy [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '04acf6ff561c4637b94dff6b1425b362', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77738cedb721478cba2cf27fa227bb3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1943.796977] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947882, 'name': Rename_Task, 'duration_secs': 0.148259} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.797345] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1943.797648] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6b7226c-bb30-4b11-bacc-6561edee5400 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.804654] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1943.804654] env[62405]: value = "task-1947883" [ 1943.804654] env[62405]: _type = "Task" [ 1943.804654] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.814195] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.867472] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fd0453-d426-4e6b-b942-babbb094a613 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.877868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc14c5e9-8fbf-45ba-a000-122e61b3486e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.913827] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Successfully created port: 2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.916777] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a11462-4d05-40bb-978c-bd161f06137b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.926125] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f334a8ef-6f55-4103-aa5c-479b02a92ec3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.944386] env[62405]: DEBUG nova.compute.provider_tree [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1943.968825] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a 
tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1943.969046] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7eccc2fe-3bf6-4267-adb3-45bcda9fe177 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.977076] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1943.977076] env[62405]: value = "task-1947884" [ 1943.977076] env[62405]: _type = "Task" [ 1943.977076] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.987034] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.059676] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1944.125311] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1944.316154] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947883, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.346461] env[62405]: DEBUG nova.network.neutron [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.448517] env[62405]: DEBUG nova.scheduler.client.report [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1944.487043] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947884, 'name': PowerOffVM_Task, 'duration_secs': 0.359396} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.487320] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1944.488140] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a7cf49-91c6-482a-a6d4-c8f5798e4979 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.507328] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e20002-2f9e-4f47-833d-0e54b4fc1d2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.540297] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1944.540903] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a05c528e-83a1-4613-9b72-b54ba9edb7a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.548047] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1944.548047] env[62405]: value = "task-1947885" [ 1944.548047] env[62405]: _type = "Task" [ 1944.548047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.556614] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.816660] env[62405]: DEBUG oslo_vmware.api [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947883, 'name': PowerOnVM_Task, 'duration_secs': 1.000621} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.817024] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1944.817151] env[62405]: INFO nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Took 14.71 seconds to spawn the instance on the hypervisor. [ 1944.817336] env[62405]: DEBUG nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1944.818127] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bb7ee5-81fd-450a-82bf-957e7c4737d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.849872] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.850431] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Instance network_info: |[{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1944.850731] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: 
ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:07:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50171613-b419-45e3-9ada-fcb6cd921428', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1336ca88-2020-4b2c-b082-e45e1fe68506', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1944.858366] env[62405]: DEBUG oslo.service.loopingcall [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1944.858984] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1944.859311] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d3e09f4-2346-4fe9-80e0-d1f13d4a69c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.879778] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1944.879778] env[62405]: value = "task-1947886" [ 1944.879778] env[62405]: _type = "Task" [ 1944.879778] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.889792] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947886, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.954090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.956992] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.959s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.957553] env[62405]: DEBUG nova.objects.instance [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'resources' on Instance uuid 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1945.018605] env[62405]: DEBUG nova.compute.manager [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Received event network-changed-1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1945.018876] env[62405]: DEBUG nova.compute.manager [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] 
[instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Refreshing instance network info cache due to event network-changed-1336ca88-2020-4b2c-b082-e45e1fe68506. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1945.019104] env[62405]: DEBUG oslo_concurrency.lockutils [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] Acquiring lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.019295] env[62405]: DEBUG oslo_concurrency.lockutils [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] Acquired lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.019524] env[62405]: DEBUG nova.network.neutron [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Refreshing network info cache for port 1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.060401] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1945.060668] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1945.060978] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.061090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.061284] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1945.061798] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e14a8795-090a-49f9-9cf8-2c6a14ad2ed5 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.069937] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1945.073253] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1945.073451] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1945.074190] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5375ff3-ac8c-44d2-9df3-5991ae6cc3ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.079851] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1945.079851] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d43c3c-b4a4-d4a3-f9d9-00c553a6838b" [ 1945.079851] env[62405]: _type = "Task" [ 1945.079851] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.091405] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d43c3c-b4a4-d4a3-f9d9-00c553a6838b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.093468] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1945.093707] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1945.093866] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1945.094065] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1945.094246] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1945.094434] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1945.094679] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1945.094877] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1945.095106] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1945.095315] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1945.095501] env[62405]: DEBUG nova.virt.hardware [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1945.096485] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ba2545-1067-4497-a3e0-9973221f63d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.104132] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0377da-9f99-4774-8ba6-f09feadf65cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.390435] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947886, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.593683] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d43c3c-b4a4-d4a3-f9d9-00c553a6838b, 'name': SearchDatastore_Task, 'duration_secs': 0.009697} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.596879] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db82b448-0cf2-4f6b-84f3-3fb8ed713686 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.603014] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1945.603014] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d04ad9-30b0-1ae8-8490-2bb0e982e784" [ 1945.603014] env[62405]: _type = "Task" [ 1945.603014] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.612648] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d04ad9-30b0-1ae8-8490-2bb0e982e784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.686734] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2eef21-4e75-49c5-b175-d52565020bd5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.696337] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55ceaa9-3605-486a-a42f-e3d5e62067ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.725814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dd3f30-2d39-4815-a764-66d67812f48d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.733318] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c9c748-93c0-4770-ad95-0ed049374d6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.749787] env[62405]: DEBUG nova.compute.provider_tree [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1945.770065] env[62405]: DEBUG nova.network.neutron [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updated VIF entry in instance network info cache for port 1336ca88-2020-4b2c-b082-e45e1fe68506. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1945.770400] env[62405]: DEBUG nova.network.neutron [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.821652] env[62405]: INFO nova.scheduler.client.report [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance 3b9a6a82-a426-4802-9640-5b39e5e0ff49 [ 1945.837127] env[62405]: INFO nova.compute.manager [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Took 30.47 seconds to build instance. [ 1945.891159] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947886, 'name': CreateVM_Task, 'duration_secs': 0.561259} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.891391] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1945.891950] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.892169] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.892458] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1945.892698] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2fa1abc-21aa-4301-9d62-7d497eb4c68f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.897840] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1945.897840] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e610fd-45cb-0f29-6ddc-c595ebf7f339" [ 1945.897840] env[62405]: _type = "Task" [ 1945.897840] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.905615] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e610fd-45cb-0f29-6ddc-c595ebf7f339, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.012270] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Successfully updated port: 2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1946.113689] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d04ad9-30b0-1ae8-8490-2bb0e982e784, 'name': SearchDatastore_Task, 'duration_secs': 0.038298} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.113943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.114229] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1946.114487] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1e4afd8-824c-4369-b19f-605cc91f4cdc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.121108] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1946.121108] env[62405]: value = "task-1947887" [ 1946.121108] env[62405]: _type = "Task" [ 1946.121108] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.128430] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947887, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.253243] env[62405]: DEBUG nova.scheduler.client.report [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1946.272938] env[62405]: DEBUG oslo_concurrency.lockutils [req-40d59c0f-bc2e-497a-ae10-25e5cd126d41 req-4cb024fe-e8ab-4cb4-bb07-3c0fd0cbc6b1 service nova] Releasing lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.331583] env[62405]: DEBUG oslo_concurrency.lockutils [None req-efd16a8c-b93f-464f-93c8-b2b1289fbd6a tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "3b9a6a82-a426-4802-9640-5b39e5e0ff49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.315s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.339352] env[62405]: DEBUG oslo_concurrency.lockutils [None req-33b76ba1-8888-44ed-a6d1-a7fcdefa6f64 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.984s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.407781] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e610fd-45cb-0f29-6ddc-c595ebf7f339, 'name': SearchDatastore_Task, 'duration_secs': 0.016887} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.408108] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.408394] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1946.408660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.408781] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.408959] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.410019] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a3c4a03-350b-4cf2-8e2f-2a5d27fdf5d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.425811] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.426014] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1946.426835] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c602756e-c085-4273-90fa-2894b31a13e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.432381] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1946.432381] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528483fb-6775-4abb-ad30-1598e5d86238" [ 1946.432381] env[62405]: _type = "Task" [ 1946.432381] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.439973] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528483fb-6775-4abb-ad30-1598e5d86238, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.517432] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.517593] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquired lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.517750] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1946.634833] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947887, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.759012] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.761773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 19.678s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.785525] env[62405]: INFO nova.scheduler.client.report [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted allocations for instance 67bf25ea-5774-4246-a3e6-2aeb0ebf6731 [ 1946.945038] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528483fb-6775-4abb-ad30-1598e5d86238, 'name': SearchDatastore_Task, 'duration_secs': 0.009886} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.946308] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6647ee4-95b7-4acd-bd62-cca13d5d3848 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.952695] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1946.952695] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5293aa71-455d-da26-9c05-35bbec81effb" [ 1946.952695] env[62405]: _type = "Task" [ 1946.952695] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.962026] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5293aa71-455d-da26-9c05-35bbec81effb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.051325] env[62405]: DEBUG nova.compute.manager [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Received event network-vif-plugged-2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1947.051568] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Acquiring lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.051781] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.051953] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.052205] env[62405]: DEBUG nova.compute.manager [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] No waiting events found dispatching network-vif-plugged-2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1947.052292] env[62405]: WARNING nova.compute.manager [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Received unexpected event network-vif-plugged-2e77c195-607d-43f7-a712-00157b5b9e01 for instance with vm_state building and task_state spawning. [ 1947.052447] env[62405]: DEBUG nova.compute.manager [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Received event network-changed-2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1947.052604] env[62405]: DEBUG nova.compute.manager [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Refreshing instance network info cache due to event network-changed-2e77c195-607d-43f7-a712-00157b5b9e01. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1947.052774] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Acquiring lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.061307] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1947.132772] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884675} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.133056] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. [ 1947.133805] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d374b9-7947-4563-99fe-4ee7793c02d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.158531] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1947.158822] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47b8f28c-494b-4bce-a70b-a3ee209fa562 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.191085] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1947.191085] env[62405]: value = "task-1947888" [ 1947.191085] env[62405]: _type = "Task" [ 1947.191085] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.199494] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947888, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.286821] env[62405]: DEBUG nova.network.neutron [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Updating instance_info_cache with network_info: [{"id": "2e77c195-607d-43f7-a712-00157b5b9e01", "address": "fa:16:3e:b9:73:f1", "network": {"id": "b807ad3b-1f1d-485f-a8e7-8cfaa4c5ac19", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1212780647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77738cedb721478cba2cf27fa227bb3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e77c195-60", "ovs_interfaceid": "2e77c195-607d-43f7-a712-00157b5b9e01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.293889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-772d3a65-2cfe-4e81-8299-60125aaab52f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "67bf25ea-5774-4246-a3e6-2aeb0ebf6731" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.468s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.344714] env[62405]: DEBUG nova.compute.manager [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1947.344714] env[62405]: DEBUG nova.compute.manager [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1947.344917] env[62405]: DEBUG oslo_concurrency.lockutils [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.345110] env[62405]: DEBUG oslo_concurrency.lockutils [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.345279] env[62405]: DEBUG nova.network.neutron [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1947.386287] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "86378df0-a658-427d-aca5-de25f84eb28b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.386473] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.386752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "86378df0-a658-427d-aca5-de25f84eb28b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.386968] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.387224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.389578] env[62405]: INFO nova.compute.manager [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 
86378df0-a658-427d-aca5-de25f84eb28b] Terminating instance [ 1947.465323] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5293aa71-455d-da26-9c05-35bbec81effb, 'name': SearchDatastore_Task, 'duration_secs': 0.041679} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.468390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.468710] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1947.469280] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7896e68d-c00f-4ed2-9082-402ac20284a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.477672] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1947.477672] env[62405]: value = "task-1947889" [ 1947.477672] env[62405]: _type = "Task" [ 1947.477672] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.493239] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947889, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.648293] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577b195e-8dfb-4dee-a7e2-74dcdb3db18d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.431659] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Releasing lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.431974] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Instance network_info: |[{"id": "2e77c195-607d-43f7-a712-00157b5b9e01", "address": "fa:16:3e:b9:73:f1", "network": {"id": "b807ad3b-1f1d-485f-a8e7-8cfaa4c5ac19", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1212780647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77738cedb721478cba2cf27fa227bb3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e77c195-60", "ovs_interfaceid": "2e77c195-607d-43f7-a712-00157b5b9e01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1948.436128] env[62405]: DEBUG nova.compute.manager [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1948.436128] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1948.439888] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Acquired lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.440084] env[62405]: DEBUG nova.network.neutron [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Refreshing network info cache for port 2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1948.441182] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:73:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8c58d99d-ec12-4fc3-ab39-042b3f8cbb89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e77c195-607d-43f7-a712-00157b5b9e01', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1948.449293] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Creating folder: Project (77738cedb721478cba2cf27fa227bb3c). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1948.452455] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6d3e2-abbe-470e-a3da-2f1d441ff968 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.459251] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a169bae7-f6c0-4906-9794-d3376e1e4da2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.464970] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77cc2fd2-7244-4896-8553-34cc0aa32eff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.466693] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.466878] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.507165] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488814} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.507467] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1948.507723] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947888, 'name': ReconfigVM_Task, 'duration_secs': 0.302196} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.511428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed59f127-16fb-4898-acf5-64a0ea50efe2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.515020] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1948.515020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1948.515020] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f39d2c1-742b-40ff-ab49-6ab0f49b60db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.516013] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1948.516389] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Created folder: Project (77738cedb721478cba2cf27fa227bb3c) in parent group-v401284. [ 1948.516565] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Creating folder: Instances. Parent ref: group-v401551. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1948.518618] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-924a73cc-5d8d-4663-bdae-b71439fee620 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.521396] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d8a892-57d5-4b2c-ae32-f4d0a4dba3da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.523934] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-165c5cbb-0e86-40db-a221-280e4a3d2b43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.550887] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e96903-3b4c-4d4f-97ed-baffc2ffd308 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.563220] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1948.563220] env[62405]: value = "task-1947891" [ 1948.563220] env[62405]: _type = "Task" [ 1948.563220] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.563888] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4f587a2-ecb8-4c75-b089-4d68b6e96a3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.574161] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1948.574161] env[62405]: value = "task-1947893" [ 1948.574161] env[62405]: _type = "Task" [ 1948.574161] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.574436] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Created folder: Instances in parent group-v401551. [ 1948.574614] env[62405]: DEBUG oslo.service.loopingcall [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1948.581172] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1948.592103] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc510cb6-a25b-4020-bfe3-dfd66f93bef4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.607030] env[62405]: DEBUG nova.compute.provider_tree [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1948.613047] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1948.613047] env[62405]: value = "task-1947894" [ 1948.613047] env[62405]: _type = "Task" [ 1948.613047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.620174] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.625073] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1948.625073] env[62405]: value = "task-1947895" [ 1948.625073] env[62405]: _type = "Task" [ 1948.625073] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.625683] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074268} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.626483] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1948.630575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec48cb5d-4420-4d1f-b607-3f8603d78e3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.636409] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947894, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.641625] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947895, 'name': CreateVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.661244] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1948.664040] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-768ef32c-3b11-4d77-847d-5c59dc93fef0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.686707] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1948.686707] env[62405]: value = "task-1947896" [ 1948.686707] env[62405]: _type = "Task" [ 1948.686707] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.695069] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947896, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.793642] env[62405]: DEBUG nova.network.neutron [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1948.794054] env[62405]: DEBUG nova.network.neutron [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.939320] env[62405]: DEBUG nova.network.neutron [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Updated VIF entry in instance network info cache for port 2e77c195-607d-43f7-a712-00157b5b9e01. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1948.939768] env[62405]: DEBUG nova.network.neutron [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Updating instance_info_cache with network_info: [{"id": "2e77c195-607d-43f7-a712-00157b5b9e01", "address": "fa:16:3e:b9:73:f1", "network": {"id": "b807ad3b-1f1d-485f-a8e7-8cfaa4c5ac19", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1212780647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77738cedb721478cba2cf27fa227bb3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8c58d99d-ec12-4fc3-ab39-042b3f8cbb89", "external-id": "nsx-vlan-transportzone-44", "segmentation_id": 44, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e77c195-60", "ovs_interfaceid": "2e77c195-607d-43f7-a712-00157b5b9e01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.969243] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1949.084139] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947891, 'name': PowerOffVM_Task, 'duration_secs': 0.29513} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.084305] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1949.084504] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1949.084859] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6871bd95-67e0-4a35-b7d7-a3259d439c5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.109888] env[62405]: DEBUG nova.scheduler.client.report [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1949.125669] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947894, 'name': ReconfigVM_Task, 'duration_secs': 0.260698} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.126642] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1949.127195] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e27a7c1a-91e8-4207-bace-63037b2f4a15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.137958] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947895, 'name': CreateVM_Task, 'duration_secs': 0.434304} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.139167] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1949.139740] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1949.139740] env[62405]: value = "task-1947898" [ 1949.139740] env[62405]: _type = "Task" [ 1949.139740] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.140400] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.140555] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.140868] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1949.141164] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d449079-611f-4dfb-b219-e0b7c3e49028 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.149144] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1949.149144] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52477819-0669-33e7-c000-a2a89ac18f76" [ 1949.149144] env[62405]: _type = "Task" [ 1949.149144] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.152138] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947898, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.159856] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52477819-0669-33e7-c000-a2a89ac18f76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.196238] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947896, 'name': ReconfigVM_Task, 'duration_secs': 0.390023} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.196534] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfigured VM instance instance-00000062 to attach disk [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1949.197423] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8232968-e0e8-4493-af6b-b52f6fdd205e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.203431] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1949.203431] env[62405]: value = "task-1947899" [ 1949.203431] env[62405]: _type = "Task" [ 1949.203431] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.211815] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947899, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.252119] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1949.252432] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1949.252647] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] 86378df0-a658-427d-aca5-de25f84eb28b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.252932] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fcb40ad-7029-4241-a7d3-6e9784876542 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.259696] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1949.259696] env[62405]: value = "task-1947900" [ 1949.259696] env[62405]: _type = "Task" [ 1949.259696] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.267723] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947900, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.297496] env[62405]: DEBUG oslo_concurrency.lockutils [req-795e12c8-c7fb-475b-9fd5-56d155aa5ba0 req-3551f794-37ef-4fd7-bd18-cb348b5e606d service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.443160] env[62405]: DEBUG oslo_concurrency.lockutils [req-6549792d-4582-4408-8299-ce1583a04d3f req-f0273e02-bb02-4caf-8811-afe3ac59b2e3 service nova] Releasing lock "refresh_cache-1f8293f9-5fba-4bf4-bf7c-65837c1092a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.492193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.617402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.617642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.651575] env[62405]: DEBUG oslo_vmware.api [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947898, 'name': PowerOnVM_Task, 'duration_secs': 0.49053} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.653023] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1949.657142] env[62405]: DEBUG nova.compute.manager [None req-cd885ba3-5635-4b36-b484-c2d1c596439a tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1949.658146] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63111731-e8ee-455d-b926-4f3d589a58b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.666594] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52477819-0669-33e7-c000-a2a89ac18f76, 'name': SearchDatastore_Task, 'duration_secs': 0.019079} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.668205] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.668448] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1949.668680] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.668827] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.669014] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 
tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.671357] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fff8d17-6ab5-4c28-bb6e-7dfc77143f02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.682662] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.682846] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1949.683805] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2033f771-8908-48c5-8a56-ad1034e66571 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.689232] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1949.689232] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a9cb7d-85ca-ec1a-57ed-b0bb40468e3b" [ 1949.689232] env[62405]: _type = "Task" [ 1949.689232] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.696404] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a9cb7d-85ca-ec1a-57ed-b0bb40468e3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.712656] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947899, 'name': Rename_Task, 'duration_secs': 0.164623} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.712959] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1949.713143] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0857bce5-5f88-42f0-8f1d-f32f97fe60ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.719287] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1949.719287] env[62405]: value = "task-1947901" [ 1949.719287] env[62405]: _type = "Task" [ 1949.719287] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.726437] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.770556] env[62405]: DEBUG oslo_vmware.api [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300367} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.770815] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.770995] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1949.771176] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1949.771346] env[62405]: INFO nova.compute.manager [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Took 1.34 seconds to destroy the instance on the hypervisor. 
[ 1949.771569] env[62405]: DEBUG oslo.service.loopingcall [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.771752] env[62405]: DEBUG nova.compute.manager [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1949.771859] env[62405]: DEBUG nova.network.neutron [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1950.121141] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.359s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.124472] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1950.126543] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.711s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.128024] env[62405]: DEBUG nova.objects.instance [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'resources' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.199971] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a9cb7d-85ca-ec1a-57ed-b0bb40468e3b, 'name': SearchDatastore_Task, 'duration_secs': 0.014913} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.200889] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca4f1284-88c7-41d7-87a5-94928511df7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.206050] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1950.206050] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527cca86-6613-749a-d0ec-4cc96b4ac2f9" [ 1950.206050] env[62405]: _type = "Task" [ 1950.206050] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.213548] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527cca86-6613-749a-d0ec-4cc96b4ac2f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.227653] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947901, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.375088] env[62405]: DEBUG nova.compute.manager [req-029c4496-68b4-4198-8b32-c803047bf9c3 req-4d53d789-a05d-4c29-9173-101de3bb63f3 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Received event network-vif-deleted-f58e9a5c-89b1-4aff-8825-ba44d10e8d46 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1950.375088] env[62405]: INFO nova.compute.manager [req-029c4496-68b4-4198-8b32-c803047bf9c3 req-4d53d789-a05d-4c29-9173-101de3bb63f3 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Neutron deleted interface f58e9a5c-89b1-4aff-8825-ba44d10e8d46; detaching it from the instance and deleting it from the info cache [ 1950.375088] env[62405]: DEBUG nova.network.neutron [req-029c4496-68b4-4198-8b32-c803047bf9c3 req-4d53d789-a05d-4c29-9173-101de3bb63f3 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.639132] env[62405]: DEBUG nova.objects.instance [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'numa_topology' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1950.662996] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.693208] env[62405]: INFO nova.scheduler.client.report [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocation for migration e28e937c-0b2d-4bf2-9f78-c26841fab210 [ 1950.721066] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527cca86-6613-749a-d0ec-4cc96b4ac2f9, 'name': SearchDatastore_Task, 'duration_secs': 0.02667} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.724487] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.724928] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1f8293f9-5fba-4bf4-bf7c-65837c1092a0/1f8293f9-5fba-4bf4-bf7c-65837c1092a0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1950.725028] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c388273a-c556-4e36-b098-457fde6ee66f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.732776] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947901, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.734093] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1950.734093] env[62405]: value = "task-1947902" [ 1950.734093] env[62405]: _type = "Task" [ 1950.734093] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.741705] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947902, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.821800] env[62405]: DEBUG nova.network.neutron [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.877205] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1593aad7-f736-4523-8e74-aedf94b907f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.893552] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06febdb3-4400-43a9-9ff4-66467a01d6a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.929700] env[62405]: DEBUG nova.compute.manager [req-029c4496-68b4-4198-8b32-c803047bf9c3 req-4d53d789-a05d-4c29-9173-101de3bb63f3 service nova] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Detach interface failed, port_id=f58e9a5c-89b1-4aff-8825-ba44d10e8d46, reason: Instance 86378df0-a658-427d-aca5-de25f84eb28b could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1951.141929] env[62405]: DEBUG nova.objects.base [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1951.208148] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50c4893f-da17-4ac3-a25e-8875954fe761 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 27.075s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.232526] env[62405]: DEBUG oslo_vmware.api [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947901, 'name': PowerOnVM_Task, 'duration_secs': 1.090922} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.232807] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1951.233018] env[62405]: INFO nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Took 8.11 seconds to spawn the instance on the hypervisor. 
[ 1951.233204] env[62405]: DEBUG nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1951.233961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9803d25-833f-4f0f-82a0-33e1ff2365d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.247785] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50095} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.250748] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1f8293f9-5fba-4bf4-bf7c-65837c1092a0/1f8293f9-5fba-4bf4-bf7c-65837c1092a0.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1951.250970] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1951.252742] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-420584d4-d1a3-4364-8db4-2aa75a6ef2ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.261708] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1951.261708] env[62405]: value = "task-1947903" [ 1951.261708] env[62405]: _type = "Task" [ 1951.261708] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.270887] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.324419] env[62405]: INFO nova.compute.manager [-] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Took 1.55 seconds to deallocate network for instance. 
[ 1951.390029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.390029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.390029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.390029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.390029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.393811] env[62405]: INFO nova.compute.manager [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Terminating instance [ 1951.429033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98bd3e6-77a3-49c0-a478-686f48d33a9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.435884] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae420e0b-2eb3-4329-a692-103aee83479a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.467659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722dfacc-b2b3-4037-8cc2-589a60aa16f4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.475869] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-52ff5ceb-167b-481e-b5d2-9ebbc54d35d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.489128] env[62405]: DEBUG nova.compute.provider_tree [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.766512] env[62405]: INFO nova.compute.manager [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Took 34.16 seconds to build instance. [ 1951.773212] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071593} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.773510] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1951.774308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d559ae23-77fa-4e9d-bb91-125ff6e8b64f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.797093] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 1f8293f9-5fba-4bf4-bf7c-65837c1092a0/1f8293f9-5fba-4bf4-bf7c-65837c1092a0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1951.797582] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b812c92a-4817-4174-9ded-a085a6479c11 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.818787] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1951.818787] env[62405]: value = "task-1947904" [ 1951.818787] env[62405]: _type = "Task" [ 1951.818787] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.827322] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947904, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.833261] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.874646] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.875810] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.897939] env[62405]: DEBUG nova.compute.manager [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1951.899047] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1951.899614] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd241c6-e5d9-4a95-9fcd-4479d9f32699 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.913200] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1951.913497] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3571d2d-a8ad-4c6d-b449-50ef5de0db67 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.920159] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1951.920159] env[62405]: value = "task-1947905" [ 1951.920159] env[62405]: _type = "Task" [ 1951.920159] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.929401] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947905, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.994148] env[62405]: DEBUG nova.scheduler.client.report [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1952.268889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3087fc87-52f0-4ef9-a3a0-a1605987e698 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.675s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.330901] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947904, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.338708] env[62405]: INFO nova.compute.manager [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Rescuing [ 1952.339402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.339402] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.339402] env[62405]: DEBUG nova.network.neutron [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1952.379148] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1952.431206] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947905, 'name': PowerOffVM_Task, 'duration_secs': 0.199402} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.431294] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1952.431421] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1952.432040] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6591e350-21d1-49eb-8899-cf26681f3d8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.499484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.373s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.502108] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.705s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.504126] env[62405]: INFO nova.compute.claims [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1952.551329] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1952.551562] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1952.551731] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] 81aebf11-5d80-4a86-b232-3ecc5f3892c2 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1952.552161] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-8c5d2b78-26f3-415f-b944-f381f11448a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.558913] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1952.558913] env[62405]: value = "task-1947907" [ 1952.558913] env[62405]: _type = "Task" [ 1952.558913] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.567185] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947907, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.831019] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947904, 'name': ReconfigVM_Task, 'duration_secs': 0.581095} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.831368] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 1f8293f9-5fba-4bf4-bf7c-65837c1092a0/1f8293f9-5fba-4bf4-bf7c-65837c1092a0.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1952.832013] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10f72bee-f629-4f23-9790-90f4c0a544c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.838646] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1952.838646] env[62405]: value = "task-1947908" [ 1952.838646] env[62405]: _type = "Task" [ 1952.838646] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.848287] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947908, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.908549] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.015179] env[62405]: DEBUG oslo_concurrency.lockutils [None req-db095d67-8972-456d-a475-f7f18507a1ab tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 48.888s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.016262] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 23.522s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.016262] env[62405]: INFO nova.compute.manager [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Unshelving [ 1953.068759] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947907, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.253112] env[62405]: DEBUG nova.network.neutron [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [{"id": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "address": "fa:16:3e:e9:00:52", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap744277fe-5a", "ovs_interfaceid": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.349698] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947908, 'name': Rename_Task, 'duration_secs': 0.267727} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.350078] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1953.350407] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-681a8419-19c9-47b8-9a66-6744f4c16ea0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.357240] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1953.357240] env[62405]: value = "task-1947909" [ 1953.357240] env[62405]: _type = "Task" [ 1953.357240] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.366674] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947909, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.479251] env[62405]: DEBUG nova.compute.manager [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1953.572735] env[62405]: DEBUG oslo_vmware.api [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.518156} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.573240] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.573240] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1953.573329] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1953.573494] env[62405]: INFO nova.compute.manager [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1953.573732] env[62405]: DEBUG oslo.service.loopingcall [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.576284] env[62405]: DEBUG nova.compute.manager [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1953.576461] env[62405]: DEBUG nova.network.neutron [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1953.756391] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.836868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baec7e03-cc31-4d28-810e-46e8e2371377 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.844756] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3058b5c9-dd37-4429-87e6-def667c27bb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.884367] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13c6e62-a282-457d-ad9c-f909e1cee23e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.896356] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947909, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.900164] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60373fe6-a4e6-479d-b453-8dec6de76b76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.914231] env[62405]: DEBUG nova.compute.provider_tree [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1953.929830] env[62405]: DEBUG nova.compute.manager [req-79c999a5-8797-4339-87f0-d7cb5e0692be req-f75a7c5f-e9e6-4ff3-ae74-9b9230e4c7a3 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Received event network-vif-deleted-af199d5b-90da-4443-ac9d-e8d6bf721a80 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1953.930063] env[62405]: INFO nova.compute.manager [req-79c999a5-8797-4339-87f0-d7cb5e0692be req-f75a7c5f-e9e6-4ff3-ae74-9b9230e4c7a3 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Neutron deleted interface af199d5b-90da-4443-ac9d-e8d6bf721a80; detaching it from the instance and deleting it from the info cache [ 1953.930326] env[62405]: DEBUG nova.network.neutron [req-79c999a5-8797-4339-87f0-d7cb5e0692be req-f75a7c5f-e9e6-4ff3-ae74-9b9230e4c7a3 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.009072] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.041220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.387616] env[62405]: DEBUG oslo_vmware.api [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947909, 'name': PowerOnVM_Task, 'duration_secs': 0.69508} completed successfully. 
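The repeated "Task: {...} progress is N%" lines followed by "completed successfully ... duration_secs" come from a poll loop over the vCenter task invoked a few records earlier (PowerOnVM_Task here). An illustrative analogue of that loop in plain Python, not the oslo.vmware implementation itself:

import time

def wait_for_task(poll_state, interval=0.5):
    """poll_state() returns (state, progress); state is 'running', 'success' or 'error'."""
    start = time.monotonic()
    while True:
        state, progress = poll_state()
        print(f"progress is {progress}%")        # the recurring progress records above
        if state == "success":
            return time.monotonic() - start      # reported as duration_secs in the log
        if state == "error":
            raise RuntimeError("vCenter task failed")
        time.sleep(interval)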
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.387899] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1954.388118] env[62405]: INFO nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1954.388303] env[62405]: DEBUG nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1954.389109] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ef330c-9caf-4a04-9b20-e8da8435ca61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.397514] env[62405]: DEBUG nova.network.neutron [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.417539] env[62405]: DEBUG nova.scheduler.client.report [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1954.433677] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b0eac24-daa5-41bc-8835-10cdb724722a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.444524] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f197bca8-4831-486a-8395-efd3b31765ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.476249] env[62405]: DEBUG nova.compute.manager [req-79c999a5-8797-4339-87f0-d7cb5e0692be req-f75a7c5f-e9e6-4ff3-ae74-9b9230e4c7a3 service nova] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Detach interface failed, port_id=af199d5b-90da-4443-ac9d-e8d6bf721a80, reason: Instance 81aebf11-5d80-4a86-b232-3ecc5f3892c2 could not be found. 
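The inventory dict reported above uses the Placement inventory schema; schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation (16 VCPU, 65530 MB, 170 GB here). A worked sketch with the logged figures:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400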
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1954.905406] env[62405]: INFO nova.compute.manager [-] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Took 1.33 seconds to deallocate network for instance. [ 1954.909740] env[62405]: INFO nova.compute.manager [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Took 31.76 seconds to build instance. [ 1954.922525] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.420s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.922973] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1954.925491] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.380s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.927010] env[62405]: INFO nova.compute.claims [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1955.301650] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1955.302013] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5c2b112-3183-48c9-beb0-380d6d50e939 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.311053] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1955.311053] env[62405]: value = "task-1947910" [ 1955.311053] env[62405]: _type = "Task" [ 1955.311053] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.318536] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947910, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.412545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.413070] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4dfa7984-3ddb-499d-a9e9-517dca6cb24b tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.287s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.431814] env[62405]: DEBUG nova.compute.utils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1955.435417] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1955.435500] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1955.514488] env[62405]: DEBUG nova.policy [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '74ccaab252cb403bb54364c35d6dcbd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d2ff9a8cb1840889a4a2a87c663f59e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1955.822478] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947910, 'name': PowerOffVM_Task, 'duration_secs': 0.221154} completed successfully. 
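The nova.policy DEBUG record above shows the network:attach_external_network check failing for a token that carries only the 'reader' and 'member' roles; in the default policy that action is admin-only, so the failure is expected and allocation simply proceeds on the requested tenant network. A hedged stand-in for that decision, not oslo.policy itself:

# Trimmed from the credentials dict in the logged policy check.
creds = {"roles": ["reader", "member"], "is_admin": False}

def may_attach_external_network(creds):
    # stand-in for the default "admin only" rule on this action
    return creds.get("is_admin", False) or "admin" in creds.get("roles", [])

print(may_attach_external_network(creds))   # False, matching the failed check above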
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.822750] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1955.826553] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3b98a1-3731-4e0a-92f3-4a928b989012 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.849135] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40adcfe-007a-45d7-95fe-a8af4d8e845c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.856632] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Successfully created port: 9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.893298] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1955.893588] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-738a5aa8-c95d-4c20-bf9d-4f8068a2413a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.900460] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1955.900460] env[62405]: value = "task-1947911" [ 1955.900460] env[62405]: _type = "Task" [ 1955.900460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.908242] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.936482] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1956.160340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.160340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.160340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.160541] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.160612] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.164654] env[62405]: INFO nova.compute.manager [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Terminating instance [ 1956.233326] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5303663-6728-4f0a-b887-6e2b65c3e5b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.242447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7705372-e1ac-4da7-a35d-b7ca60407e70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.271466] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c7fda2-e30a-4949-9b9d-8bc455005c49 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.279446] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba48e41-8adc-4d6c-9ff6-d2a9a4fd3b93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.294558] env[62405]: DEBUG nova.compute.provider_tree [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.411770] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1956.412060] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.412314] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.412497] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.412639] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1956.412890] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-072ec5b3-40f0-494d-9cec-94e91e9cc055 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.422266] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1956.422455] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 
tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1956.423245] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-861229f6-6301-4871-a993-6f3caa1e59a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.428962] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1956.428962] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb6e4a-ce49-22ab-eb4a-d78948ad4845" [ 1956.428962] env[62405]: _type = "Task" [ 1956.428962] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.437307] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb6e4a-ce49-22ab-eb4a-d78948ad4845, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.669236] env[62405]: DEBUG nova.compute.manager [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1956.669664] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.671173] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d44ca3a-3c6e-4f26-b7ed-c57d1b27acfa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.684611] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.685049] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00ef4af3-29dc-482c-ad6e-b4d236003694 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.694410] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1956.694410] env[62405]: value = "task-1947912" [ 1956.694410] env[62405]: _type = "Task" [ 1956.694410] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.707751] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947912, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.798106] env[62405]: DEBUG nova.scheduler.client.report [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1956.939892] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb6e4a-ce49-22ab-eb4a-d78948ad4845, 'name': SearchDatastore_Task, 'duration_secs': 0.009173} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.940809] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fba7dad-ede6-47fa-882f-80a6765eb89c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.945903] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1956.945903] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523aee9f-aa49-5837-0024-ec3ca797bfde" [ 1956.945903] env[62405]: _type = "Task" [ 1956.945903] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.950116] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1956.960040] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523aee9f-aa49-5837-0024-ec3ca797bfde, 'name': SearchDatastore_Task, 'duration_secs': 0.009495} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.960040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.960278] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1956.960487] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5652ade-9481-448e-8efc-3f3b04696590 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.967210] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1956.967210] env[62405]: value = "task-1947913" [ 1956.967210] env[62405]: _type = "Task" [ 1956.967210] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.976688] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.982153] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1956.982386] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1956.982546] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1956.982725] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1956.982882] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1956.983026] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1956.983243] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1956.983400] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1956.983570] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1956.983723] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1956.983895] env[62405]: DEBUG nova.virt.hardware [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1956.984736] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a01c2f8-4963-4d48-b454-0ac41845afb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.991957] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cb1f87-839b-4def-8319-6a0c44ac4b31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.207142] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947912, 'name': PowerOffVM_Task, 'duration_secs': 0.259407} completed successfully. 
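The nova.virt.hardware records above walk from the flavor (vcpus=1, no flavor or image topology limits) to a single possible topology of 1 socket, 1 core, 1 thread. A simplified sketch of that enumeration idea, not Nova's exact algorithm:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) factorizations of the vCPU count
    # within the given limits; with 1 vCPU only (1, 1, 1) qualifies.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], as reported in the log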
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.207691] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1957.208216] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1957.208481] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de60e489-1f1d-4bd7-8c94-1f8190a32684 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.303492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.304458] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1957.308614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.816s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.310241] env[62405]: INFO nova.compute.claims [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1957.369293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1957.369554] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1957.369710] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Deleting the datastore file [datastore1] 1f8293f9-5fba-4bf4-bf7c-65837c1092a0 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1957.369978] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-621f56a6-b1da-4e67-9f76-fff66c44bf70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.378981] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for the task: (returnval){ [ 1957.378981] env[62405]: value = "task-1947915" [ 1957.378981] env[62405]: _type = "Task" [ 1957.378981] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.389077] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.477860] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947913, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.815502] env[62405]: DEBUG nova.compute.utils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1957.820161] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1957.820485] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1957.869678] env[62405]: DEBUG nova.policy [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '766cbfd2f4944dc5b4bb3c210c4c6a95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a949e89f885745acb15d0afd4893ce68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1957.888421] env[62405]: DEBUG oslo_vmware.api [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Task: {'id': task-1947915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301422} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.888706] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.888893] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.889152] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.889266] env[62405]: INFO nova.compute.manager [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1957.889580] env[62405]: DEBUG oslo.service.loopingcall [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1957.889781] env[62405]: DEBUG nova.compute.manager [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1957.889882] env[62405]: DEBUG nova.network.neutron [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1957.910385] env[62405]: DEBUG nova.compute.manager [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Received event network-vif-plugged-9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1957.910616] env[62405]: DEBUG oslo_concurrency.lockutils [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] Acquiring lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.910916] env[62405]: DEBUG oslo_concurrency.lockutils [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.911088] env[62405]: DEBUG oslo_concurrency.lockutils [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.911195] env[62405]: DEBUG nova.compute.manager [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] No waiting events found dispatching network-vif-plugged-9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1957.911340] env[62405]: WARNING nova.compute.manager [req-8031a1d3-1457-4aa1-87ee-0e445babd4be req-f4cc1337-1095-4351-a3ae-94eb2fb6ed4e service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Received unexpected event network-vif-plugged-9835277c-1b66-4088-ab4a-9d2b8e7e6266 for instance with vm_state building and task_state spawning. [ 1957.979412] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539238} completed successfully. 
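The "No waiting events found dispatching network-vif-plugged-..." record and the WARNING that follows describe a benign race: Neutron delivered the vif-plugged event while the instance was still building, before the compute manager had registered a waiter for it. An illustrative plain-Python analogue of that waiter bookkeeping, not Nova's InstanceEvents implementation:

import threading

waiters = {}        # (instance_uuid, event_name) -> threading.Event
lock = threading.Lock()

def prepare_for_event(key):
    # Registered by the spawning code before it blocks on the event.
    with lock:
        waiters[key] = threading.Event()

def external_event(key):
    # Called when the Neutron notification arrives.
    with lock:
        waiter = waiters.pop(key, None)
    if waiter is None:
        print(f"WARNING: received unexpected event {key}")   # the case in the log
    else:
        waiter.set()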
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.979776] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. [ 1957.980598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9395830c-bf3a-4385-9397-452980661a2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.015683] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1958.019180] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb5b4200-97ce-4040-bd05-1a91128dd08b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.040633] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1958.040633] env[62405]: value = "task-1947916" [ 1958.040633] env[62405]: _type = "Task" [ 1958.040633] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.050312] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947916, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.069893] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Successfully updated port: 9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1958.321179] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Start building block device mappings for instance. 
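Taken together, the SearchDatastore_Task, CopyVirtualDisk_Task and ReconfigVM_Task records above describe the rescue-disk preparation: the cached image vmdk is copied next to the instance as a "-rescue" disk and then attached thin-provisioned. A small sketch of the datastore paths involved, with the identifiers taken directly from the log:

image_id = "e6bba7a8-c2de-41dc-871a-3859bba5f4f9"
instance_uuid = "a91a6d04-2ec0-4568-bdb3-732d148644de"
datastore = "datastore1"

cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
rescue_vmdk = f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"

# CopyVirtualDisk_Task copies cache_vmdk -> rescue_vmdk, and the subsequent
# ReconfigVM_Task attaches rescue_vmdk to the instance as a thin-provisioned disk.
print(cache_vmdk)
print(rescue_vmdk)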
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1958.360944] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Successfully created port: 651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1958.390213] env[62405]: DEBUG nova.compute.manager [req-602cb940-6468-4a33-bb1c-342df541ed47 req-8a282fcd-9bf8-455b-9d5f-c9efc31d1ca9 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Received event network-vif-deleted-2e77c195-607d-43f7-a712-00157b5b9e01 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1958.390213] env[62405]: INFO nova.compute.manager [req-602cb940-6468-4a33-bb1c-342df541ed47 req-8a282fcd-9bf8-455b-9d5f-c9efc31d1ca9 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Neutron deleted interface 2e77c195-607d-43f7-a712-00157b5b9e01; detaching it from the instance and deleting it from the info cache [ 1958.390213] env[62405]: DEBUG nova.network.neutron [req-602cb940-6468-4a33-bb1c-342df541ed47 req-8a282fcd-9bf8-455b-9d5f-c9efc31d1ca9 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.554580] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947916, 'name': ReconfigVM_Task, 'duration_secs': 0.450224} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.558462] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfigured VM instance instance-0000005f to attach disk [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1958.559541] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e9fd1c-031a-40f4-b015-b057b8f485c1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.583310] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.583452] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.583604] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1958.594109] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7cc470b-0920-47cd-964b-282e18c350cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.611662] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1958.611662] env[62405]: value = "task-1947917" [ 1958.611662] env[62405]: _type = "Task" [ 1958.611662] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.621980] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947917, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.695093] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d723ebde-b6d6-40af-acdc-b5505b174096 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.704806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe16437-d07f-4ec5-869b-70ca662f3355 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.736085] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2765354f-5d8f-4751-a6a7-abd188561171 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.745048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f199a3-1d94-418c-b2b1-3f46e03f3e07 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.758065] env[62405]: DEBUG nova.compute.provider_tree [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1958.860871] env[62405]: DEBUG nova.network.neutron [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.892230] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08f93304-f229-41d7-b89d-6b6e865b3002 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.901635] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3b41cf-2ffb-4991-9282-46954d9dc3f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.934689] env[62405]: DEBUG nova.compute.manager [req-602cb940-6468-4a33-bb1c-342df541ed47 req-8a282fcd-9bf8-455b-9d5f-c9efc31d1ca9 service nova] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Detach interface failed, port_id=2e77c195-607d-43f7-a712-00157b5b9e01, reason: Instance 1f8293f9-5fba-4bf4-bf7c-65837c1092a0 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1959.123674] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947917, 'name': ReconfigVM_Task, 'duration_secs': 0.169943} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.124630] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1959.126468] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1959.126756] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50f4195c-77f9-4c2f-afee-26f493e2ab5d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.137934] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1959.137934] env[62405]: value = "task-1947918" [ 1959.137934] env[62405]: _type = "Task" [ 1959.137934] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.146846] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.261571] env[62405]: DEBUG nova.scheduler.client.report [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1959.278702] env[62405]: DEBUG nova.network.neutron [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Updating instance_info_cache with network_info: [{"id": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "address": "fa:16:3e:1d:dd:20", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap9835277c-1b", "ovs_interfaceid": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.334067] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1959.362154] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1959.362423] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1959.362583] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1959.362762] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1959.362906] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1959.363065] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1959.363279] env[62405]: DEBUG 
nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1959.363437] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1959.363605] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1959.363795] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1959.363962] env[62405]: DEBUG nova.virt.hardware [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1959.364408] env[62405]: INFO nova.compute.manager [-] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Took 1.47 seconds to deallocate network for instance. [ 1959.365198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd0e41-6052-40a1-8883-c063681fff34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.375533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de1bc03-cbfe-4e30-8130-6458ceb1621d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.648967] env[62405]: DEBUG oslo_vmware.api [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947918, 'name': PowerOnVM_Task, 'duration_secs': 0.430254} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.649289] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1959.652450] env[62405]: DEBUG nova.compute.manager [None req-f20f8e4a-c0d7-4b59-80c5-73e3a6e2c965 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1959.653299] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2775f577-31b1-446b-a744-c15d7eda20d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.769568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.770159] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1959.772952] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.110s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.774775] env[62405]: INFO nova.compute.claims [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1959.781929] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.782362] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Instance network_info: |[{"id": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "address": "fa:16:3e:1d:dd:20", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9835277c-1b", "ovs_interfaceid": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1959.782840] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:dd:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9835277c-1b66-4088-ab4a-9d2b8e7e6266', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1959.790604] env[62405]: DEBUG oslo.service.loopingcall [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 
tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.791495] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1959.791495] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c4f7eba-0827-42c8-86f9-874dc6e82420 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.809724] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1959.809724] env[62405]: value = "task-1947919" [ 1959.809724] env[62405]: _type = "Task" [ 1959.809724] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.818454] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947919, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.875964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.046742] env[62405]: DEBUG nova.compute.manager [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Received event network-changed-9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1960.046742] env[62405]: DEBUG nova.compute.manager [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Refreshing instance network info cache due to event network-changed-9835277c-1b66-4088-ab4a-9d2b8e7e6266. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1960.047038] env[62405]: DEBUG oslo_concurrency.lockutils [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] Acquiring lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.047238] env[62405]: DEBUG oslo_concurrency.lockutils [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] Acquired lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.047372] env[62405]: DEBUG nova.network.neutron [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Refreshing network info cache for port 9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1960.280418] env[62405]: DEBUG nova.compute.utils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1960.284642] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1960.285513] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1960.320310] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947919, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.347111] env[62405]: DEBUG nova.policy [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1960.418442] env[62405]: DEBUG nova.compute.manager [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Received event network-vif-plugged-651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1960.418695] env[62405]: DEBUG oslo_concurrency.lockutils [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] Acquiring lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.418908] env[62405]: DEBUG oslo_concurrency.lockutils [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.419091] env[62405]: DEBUG oslo_concurrency.lockutils [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.419334] env[62405]: DEBUG nova.compute.manager [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] No waiting events found dispatching network-vif-plugged-651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1960.419443] env[62405]: WARNING nova.compute.manager [req-8963dab5-92a5-4b85-b461-e72c1117724e req-40d9ff0d-d8b8-41ee-bd60-777e42efc734 service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Received unexpected event network-vif-plugged-651237ae-dab1-45f6-9177-7e1f6cda628b for instance with vm_state building and task_state spawning. 
[ 1960.483240] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Successfully updated port: 651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1960.689298] env[62405]: INFO nova.compute.manager [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Unrescuing [ 1960.689648] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.689815] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.689986] env[62405]: DEBUG nova.network.neutron [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1960.788101] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1960.824094] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947919, 'name': CreateVM_Task, 'duration_secs': 0.56849} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.824094] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1960.824659] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.824821] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.828023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1960.828023] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e326815e-ad7a-4fd8-b5fd-12ccfe1faa14 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.831955] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1960.831955] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525cf1d2-1ef5-95dd-236e-f3dd204bb772" [ 1960.831955] env[62405]: _type = "Task" [ 1960.831955] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.847062] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525cf1d2-1ef5-95dd-236e-f3dd204bb772, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.847760] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.848033] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1960.848277] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.848462] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.848635] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.849101] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e039ecc-23d5-46a6-ac46-899bb59a257a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.857869] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.858060] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1960.859082] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ece99de-f853-4cb7-8d8a-9ad1364785d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.871199] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1960.871199] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b3d8ca-eabe-7f67-3e34-bfdb1cd930c6" [ 1960.871199] env[62405]: _type = "Task" [ 1960.871199] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.881163] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b3d8ca-eabe-7f67-3e34-bfdb1cd930c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.986546] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.986703] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.986841] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1960.988648] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Successfully created port: 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1961.093957] env[62405]: DEBUG nova.network.neutron [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Updated VIF entry in instance network info cache for port 9835277c-1b66-4088-ab4a-9d2b8e7e6266. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.094338] env[62405]: DEBUG nova.network.neutron [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Updating instance_info_cache with network_info: [{"id": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "address": "fa:16:3e:1d:dd:20", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9835277c-1b", "ovs_interfaceid": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.104428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23586fea-bfab-4fe4-98cd-80f69bafe2c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.112907] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d07e8e-9a1d-4c88-8db0-a1f7a6d44c6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.154720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df124db4-d43e-4193-bc4d-0c326e9f7787 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.163984] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dd6adf-c61d-45ea-b744-cc3b36982149 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.179952] env[62405]: DEBUG nova.compute.provider_tree [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.381218] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b3d8ca-eabe-7f67-3e34-bfdb1cd930c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008378} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.381982] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5368f36c-a9e1-4d11-9831-fdf2f385d9a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.388472] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1961.388472] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ade5c-8d38-3efb-7fc8-0bc623cb20aa" [ 1961.388472] env[62405]: _type = "Task" [ 1961.388472] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.396272] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ade5c-8d38-3efb-7fc8-0bc623cb20aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.464730] env[62405]: DEBUG nova.network.neutron [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [{"id": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "address": "fa:16:3e:e9:00:52", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap744277fe-5a", "ovs_interfaceid": "744277fe-5ae4-47a1-8b6e-f92b066ed2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.521065] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1961.596858] env[62405]: DEBUG oslo_concurrency.lockutils [req-80306d6f-7190-46c5-901e-0c24a673a20e req-cc2c3578-9239-42b8-9a18-cce2837ad7fe service nova] Releasing lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.663654] env[62405]: DEBUG nova.network.neutron [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Updating instance_info_cache with network_info: [{"id": "651237ae-dab1-45f6-9177-7e1f6cda628b", "address": "fa:16:3e:c8:fd:1f", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap651237ae-da", "ovs_interfaceid": "651237ae-dab1-45f6-9177-7e1f6cda628b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.683725] env[62405]: DEBUG nova.scheduler.client.report [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1961.801432] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1961.830658] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1961.831874] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1961.831874] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1961.831874] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1961.832075] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1961.832159] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1961.832422] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1961.832613] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1961.832835] env[62405]: DEBUG 
nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1961.833050] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1961.833259] env[62405]: DEBUG nova.virt.hardware [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1961.834585] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53e9079-aff4-4028-bcf4-4197526bde38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.843423] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd7c120-ecbb-4cce-a0d8-991e30f67185 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.899749] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520ade5c-8d38-3efb-7fc8-0bc623cb20aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010079} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.900049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.900321] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca0ff947-1ae0-4f19-ae71-0784f2c20ebe/ca0ff947-1ae0-4f19-ae71-0784f2c20ebe.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1961.900587] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc24b3fa-bc6a-4ae8-a4a7-c13eb021e53f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.907597] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1961.907597] env[62405]: value = "task-1947920" [ 1961.907597] env[62405]: _type = "Task" [ 1961.907597] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.915782] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947920, 'name': CopyVirtualDisk_Task} progress is 0%. 
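The CopyVirtualDisk_Task records above show the usual oslo.vmware pattern: invoke a *_Task method, then poll the returned task reference until it completes. A minimal sketch of that pattern under assumed, placeholder connection details and datastore paths (only the call shape mirrors the records; nothing here is taken from this deployment's configuration):

    from oslo_vmware import api

    session = api.VMwareAPISession('vcenter.example.com', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # *_Task invocations return a task managed-object reference; wait_for_task()
    # then polls it (the "_poll_task ... progress is N%" records) until it
    # reaches success or raises on error/cancellation.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              session.vim.service_content.virtualDiskManager,
                              sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
                              destName='[datastore1] instance-dir/instance.vmdk')
    session.wait_for_task(task)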
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.967464] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-a91a6d04-2ec0-4568-bdb3-732d148644de" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.967918] env[62405]: DEBUG nova.objects.instance [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'flavor' on Instance uuid a91a6d04-2ec0-4568-bdb3-732d148644de {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.166326] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.166711] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Instance network_info: |[{"id": "651237ae-dab1-45f6-9177-7e1f6cda628b", "address": "fa:16:3e:c8:fd:1f", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap651237ae-da", "ovs_interfaceid": "651237ae-dab1-45f6-9177-7e1f6cda628b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1962.167177] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:fd:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf63c3c8-d774-4b81-9b12-848612a96076', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '651237ae-dab1-45f6-9177-7e1f6cda628b', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1962.176166] env[62405]: DEBUG oslo.service.loopingcall [None 
req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1962.176724] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1962.176816] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40633907-b4c6-436b-b64b-c7e1bd537a19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.195572] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.196268] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1962.199748] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.367s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.200117] env[62405]: DEBUG nova.objects.instance [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid 86378df0-a658-427d-aca5-de25f84eb28b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.208740] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1962.208740] env[62405]: value = "task-1947921" [ 1962.208740] env[62405]: _type = "Task" [ 1962.208740] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.222196] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947921, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.417631] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947920, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.476384] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb5ff35-9d57-4c93-b5e8-9e34932ac806 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.500559] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.500559] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f48f8ca6-529c-4258-a8ac-f79e897e356e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.502433] env[62405]: DEBUG nova.compute.manager [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Received event network-changed-651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1962.502620] env[62405]: DEBUG nova.compute.manager [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Refreshing instance network info cache due to event network-changed-651237ae-dab1-45f6-9177-7e1f6cda628b. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1962.502835] env[62405]: DEBUG oslo_concurrency.lockutils [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] Acquiring lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.502978] env[62405]: DEBUG oslo_concurrency.lockutils [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] Acquired lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.503153] env[62405]: DEBUG nova.network.neutron [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Refreshing network info cache for port 651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.510754] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1962.510754] env[62405]: value = "task-1947922" [ 1962.510754] env[62405]: _type = "Task" [ 1962.510754] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.519162] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947922, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.674116] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Successfully updated port: 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1962.704049] env[62405]: DEBUG nova.compute.utils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1962.708919] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1962.709298] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1962.719832] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947921, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.755018] env[62405]: DEBUG nova.policy [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96a739701a824313b30b0d214f43757b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6014bab6bc9a4b059bab88e44b31f446', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1962.920879] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947920, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.024254] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947922, 'name': PowerOffVM_Task, 'duration_secs': 0.222365} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.024254] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1963.031569] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1963.033361] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a232280-c57f-421a-80aa-a8c0d52a4d82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.048738] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530ca168-2bfb-4a50-a09a-c69b257aa817 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.061622] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73127e59-8c97-48c0-a120-232d415c35e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.064763] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1963.064763] env[62405]: value = "task-1947923" [ 1963.064763] env[62405]: _type = "Task" [ 1963.064763] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.095234] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Successfully created port: fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1963.099357] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4a768a-3771-4ee2-ac59-be9e6326482b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.104407] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947923, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.112310] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65bcd19-4059-499a-8b0b-2635d3a27cb2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.128671] env[62405]: DEBUG nova.compute.provider_tree [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.177416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.177578] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.177739] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1963.209502] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1963.223979] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947921, 'name': CreateVM_Task, 'duration_secs': 0.835648} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.224179] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1963.224839] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.225012] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.225335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1963.225591] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8106734b-afb0-4169-ad17-5c7f2787214a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.230322] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1963.230322] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523b6bef-bbf3-9e05-ec5d-468300ad35ad" [ 1963.230322] env[62405]: _type = "Task" [ 1963.230322] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.241174] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523b6bef-bbf3-9e05-ec5d-468300ad35ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.351229] env[62405]: DEBUG nova.network.neutron [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Updated VIF entry in instance network info cache for port 651237ae-dab1-45f6-9177-7e1f6cda628b. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1963.351608] env[62405]: DEBUG nova.network.neutron [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Updating instance_info_cache with network_info: [{"id": "651237ae-dab1-45f6-9177-7e1f6cda628b", "address": "fa:16:3e:c8:fd:1f", "network": {"id": "e785f241-c0f9-4e7b-978a-316f93e62a7a", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-442287566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a949e89f885745acb15d0afd4893ce68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf63c3c8-d774-4b81-9b12-848612a96076", "external-id": "nsx-vlan-transportzone-315", "segmentation_id": 315, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap651237ae-da", "ovs_interfaceid": "651237ae-dab1-45f6-9177-7e1f6cda628b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.420333] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947920, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.470108} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.421244] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] ca0ff947-1ae0-4f19-ae71-0784f2c20ebe/ca0ff947-1ae0-4f19-ae71-0784f2c20ebe.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1963.422816] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1963.422816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e94f84b3-9ac7-4133-a37d-b34bffd311a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.434504] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1963.434504] env[62405]: value = "task-1947924" [ 1963.434504] env[62405]: _type = "Task" [ 1963.434504] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.443957] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947924, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.575895] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947923, 'name': ReconfigVM_Task, 'duration_secs': 0.226562} completed successfully. 
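The ExtendVirtualDisk_Task target of 1048576 logged above is expressed in KiB and lines up with the flavor's root_gb=1, assuming the usual GiB-to-KiB conversion:

    root_gb = 1                       # m1.nano flavor root disk
    size_kb = root_gb * 1024 * 1024   # the extend call works in KiB here
    assert size_kb == 1048576         # matches "Extending root virtual disk to 1048576"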
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.576271] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1963.576508] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1963.576810] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb091620-8c7d-4f39-ad84-45da344352a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.583757] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1963.583757] env[62405]: value = "task-1947925" [ 1963.583757] env[62405]: _type = "Task" [ 1963.583757] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.593481] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947925, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.632542] env[62405]: DEBUG nova.scheduler.client.report [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1963.710220] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1963.741434] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523b6bef-bbf3-9e05-ec5d-468300ad35ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009435} completed successfully. 
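The inventory reported for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 above translates into schedulable capacity via the standard placement formula (total - reserved) * allocation_ratio; a quick check with the logged numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0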
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.741737] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.742186] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1963.742289] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.743305] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.743305] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1963.743305] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfce555a-d58d-4a7b-aa8e-dc32d7862287 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.751034] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1963.752027] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Folder [datastore1] devstack-image-cache_base created. 
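The image-cache acquire/release records above follow the oslo.concurrency lock pattern; a minimal sketch of how such a critical section is typically expressed (illustrative, not the nova vmwareapi code itself):

    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                        'e6bba7a8-c2de-41dc-871a-3859bba5f4f9')

    # external=True adds a cross-process (file-backed) lock on top of the
    # in-process semaphore; the with-block brackets the Acquiring/Acquired ...
    # Releasing lock pairs seen above, so only one worker at a time checks or
    # populates this image-cache entry.
    with lockutils.lock(IMAGE_CACHE_LOCK, external=True):
        pass  # check or populate the cached image here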
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1963.752027] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af10ed7c-6c29-4bd7-b6f0-ad0c54f13a7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.757939] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1963.757939] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b7fb1-4459-8a3c-56b4-caa6adac96d0" [ 1963.757939] env[62405]: _type = "Task" [ 1963.757939] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.767450] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b7fb1-4459-8a3c-56b4-caa6adac96d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.843745] env[62405]: DEBUG nova.network.neutron [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.853987] env[62405]: DEBUG oslo_concurrency.lockutils [req-b521796e-3eaa-4b3c-83fe-3c0f45075d2d req-b72520cd-cf2a-4619-a1c9-baab7f26895e service nova] Releasing lock "refresh_cache-1b820a12-4ca5-4b89-9016-81ebac4f1c3b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.945889] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 
0.070039} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.946647] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1963.947183] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a9e19-8172-4d80-a184-ff32303041d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.974754] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] ca0ff947-1ae0-4f19-ae71-0784f2c20ebe/ca0ff947-1ae0-4f19-ae71-0784f2c20ebe.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1963.974924] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b537e5d3-0263-4d52-b5d1-ec704adbb9df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.997217] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1963.997217] env[62405]: value = "task-1947926" [ 1963.997217] env[62405]: _type = "Task" [ 1963.997217] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.006311] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.093927] env[62405]: DEBUG oslo_vmware.api [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947925, 'name': PowerOnVM_Task, 'duration_secs': 0.421576} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.094236] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1964.094479] env[62405]: DEBUG nova.compute.manager [None req-d775358f-0074-4a15-843d-e9e4bc2443ec tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1964.095367] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fdcbee-b16f-4e74-99d7-40a262212e70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.139312] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.141877] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.233s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.142938] env[62405]: INFO nova.compute.claims [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1964.170564] env[62405]: INFO nova.scheduler.client.report [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance 86378df0-a658-427d-aca5-de25f84eb28b [ 1964.223021] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1964.251186] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1964.251458] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1964.251648] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1964.251906] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1964.252144] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1964.253027] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1964.253027] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1964.253027] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1964.253027] 
env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1964.253027] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1964.253269] env[62405]: DEBUG nova.virt.hardware [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1964.254102] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a726933-e643-4a00-8d09-9ca0651893ee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.265469] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67345998-4873-4a43-b64d-73e5b5ce31dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.272224] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522b7fb1-4459-8a3c-56b4-caa6adac96d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010251} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.273241] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2ab383e-3dad-4312-bf5c-87a69e9cbe26 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.285710] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1964.285710] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a2975-7b94-a165-5f39-603d3144d990" [ 1964.285710] env[62405]: _type = "Task" [ 1964.285710] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.293047] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a2975-7b94-a165-5f39-603d3144d990, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.346922] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.347287] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Instance network_info: |[{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1964.347806] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:8f:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3189d804-1d8d-4356-bbf0-e0bbda0a2d32', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1964.355379] env[62405]: DEBUG oslo.service.loopingcall [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
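The "Instance VIF info" record above is a reduction of the Neutron network_info entry into the handful of fields the VMware driver needs (bridge, MAC, NSX logical-switch id, port id, vif model). An illustrative sketch of that mapping built from the values in the record, not the actual nova.virt.vmwareapi code:

    nw_info_entry = {
        'id': '3189d804-1d8d-4356-bbf0-e0bbda0a2d32',
        'address': 'fa:16:3e:b3:8f:fe',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f'},
    }

    def to_vmware_vif_info(vif):
        # Keep only what the "Instance VIF info" record carries.
        return {
            'network_name': vif['network']['bridge'],
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',   # as reported in the VIF info record above
        }

    print(to_vmware_vif_info(nw_info_entry))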
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1964.355609] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1964.355840] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8254f2c6-d34a-4364-9ea7-92c383a7d4e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.377425] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1964.377425] env[62405]: value = "task-1947927" [ 1964.377425] env[62405]: _type = "Task" [ 1964.377425] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.387241] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947927, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.508414] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947926, 'name': ReconfigVM_Task, 'duration_secs': 0.412406} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.508414] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Reconfigured VM instance instance-00000064 to attach disk [datastore1] ca0ff947-1ae0-4f19-ae71-0784f2c20ebe/ca0ff947-1ae0-4f19-ae71-0784f2c20ebe.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1964.508734] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34675883-6cc6-416e-a92a-6f00c3a5b474 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.515227] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1964.515227] env[62405]: value = "task-1947928" [ 1964.515227] env[62405]: _type = "Task" [ 1964.515227] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.521357] env[62405]: DEBUG nova.compute.manager [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-vif-plugged-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1964.521575] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.521785] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.521952] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.522136] env[62405]: DEBUG nova.compute.manager [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] No waiting events found dispatching network-vif-plugged-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1964.522307] env[62405]: WARNING nova.compute.manager [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received unexpected event network-vif-plugged-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 for instance with vm_state building and task_state spawning. [ 1964.522469] env[62405]: DEBUG nova.compute.manager [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1964.522620] env[62405]: DEBUG nova.compute.manager [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing instance network info cache due to event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1964.522800] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.522936] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.523107] env[62405]: DEBUG nova.network.neutron [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1964.527842] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947928, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.680540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3fd5af0a-10f8-4840-96c6-11ede660bacc tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "86378df0-a658-427d-aca5-de25f84eb28b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.294s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.762698] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Successfully updated port: fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1964.796448] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524a2975-7b94-a165-5f39-603d3144d990, 'name': SearchDatastore_Task, 'duration_secs': 0.016503} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.797206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.797206] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1b820a12-4ca5-4b89-9016-81ebac4f1c3b/1b820a12-4ca5-4b89-9016-81ebac4f1c3b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1964.797409] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-141926de-64da-4b7d-9285-3961be867c9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.807398] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1964.807398] env[62405]: value = "task-1947929" [ 1964.807398] env[62405]: _type = "Task" [ 1964.807398] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.815257] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.888133] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947927, 'name': CreateVM_Task, 'duration_secs': 0.507101} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.888340] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1964.889649] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.889649] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.889795] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1964.890072] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d62d71-b84f-4074-a83e-1a77899f048e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.895146] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1964.895146] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df93d8-e596-1d72-ba3b-c797a33ecc9d" [ 1964.895146] env[62405]: _type = "Task" [ 1964.895146] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.904089] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df93d8-e596-1d72-ba3b-c797a33ecc9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.031116] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947928, 'name': Rename_Task, 'duration_secs': 0.158698} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.031761] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1965.032109] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecc0c3e5-a85a-407a-b0fd-3a8228592e45 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.037840] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1965.037840] env[62405]: value = "task-1947930" [ 1965.037840] env[62405]: _type = "Task" [ 1965.037840] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.046604] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.268643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.268869] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.269018] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1965.281212] env[62405]: DEBUG nova.network.neutron [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updated VIF entry in instance network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1965.282031] env[62405]: DEBUG nova.network.neutron [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.321153] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947929, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.409260] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52df93d8-e596-1d72-ba3b-c797a33ecc9d, 'name': SearchDatastore_Task, 'duration_secs': 0.015349} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.409563] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.409652] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1965.410037] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1965.410608] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.410608] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1965.411193] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da561123-bd0b-4228-accb-35b05e42cd8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.431593] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1965.431791] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1965.432583] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a752cd3-443f-4c25-9984-982306d40772 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.439977] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1965.439977] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cd4b8e-97a6-3f60-9fc5-ca0604532f33" [ 1965.439977] env[62405]: _type = "Task" [ 1965.439977] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.450348] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cd4b8e-97a6-3f60-9fc5-ca0604532f33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.482229] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dd6e83-5488-432b-a3f6-7ecf0074eba1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.490120] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbb4020-d7aa-476d-87ab-1e0894912aa5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.521149] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6642bc-09a6-40f9-b6d6-7095074ac523 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.528067] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95702e5a-e8b9-4b79-997d-e5cba3e3181e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.541352] env[62405]: DEBUG nova.compute.provider_tree [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.550743] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947930, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.749056] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.749337] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.784500] env[62405]: DEBUG oslo_concurrency.lockutils [req-86750abd-0b1b-4a82-95b6-189bdbb38f7a req-020dc14d-9d09-40ee-8553-9e18fef019f5 service nova] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.810389] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1965.822400] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.884674} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.822400] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 1b820a12-4ca5-4b89-9016-81ebac4f1c3b/1b820a12-4ca5-4b89-9016-81ebac4f1c3b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1965.822611] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1965.822706] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f83c4c1e-b510-4376-be4d-4938271c67c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.828897] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1965.828897] env[62405]: value = "task-1947931" [ 1965.828897] env[62405]: _type = "Task" [ 1965.828897] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.841627] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.889013] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.889013] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1965.949662] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52cd4b8e-97a6-3f60-9fc5-ca0604532f33, 'name': SearchDatastore_Task, 'duration_secs': 0.014634} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.950587] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2849fb95-257c-4cde-a27b-30040d5264f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.956129] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1965.956129] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232a5a8-4afc-3e8c-c7ac-6e2bd4e210b1" [ 1965.956129] env[62405]: _type = "Task" [ 1965.956129] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.963619] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232a5a8-4afc-3e8c-c7ac-6e2bd4e210b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.971866] env[62405]: DEBUG nova.network.neutron [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updating instance_info_cache with network_info: [{"id": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "address": "fa:16:3e:56:d7:78", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb939ba3-6c", "ovs_interfaceid": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.046290] env[62405]: DEBUG nova.scheduler.client.report [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1966.054982] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947930, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.252204] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1966.338380] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181673} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.339033] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1966.339533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd60ccf4-fbb2-4285-bdda-2a852e68dd9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.361497] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 1b820a12-4ca5-4b89-9016-81ebac4f1c3b/1b820a12-4ca5-4b89-9016-81ebac4f1c3b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1966.362169] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b3c8d09-1a0a-4bdf-a2f3-ae53680bf90f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.383354] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1966.383354] env[62405]: value = "task-1947932" [ 1966.383354] env[62405]: _type = "Task" [ 1966.383354] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.395651] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947932, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.396219] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1966.396294] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 1966.467262] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5232a5a8-4afc-3e8c-c7ac-6e2bd4e210b1, 'name': SearchDatastore_Task, 'duration_secs': 0.011185} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.467592] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.467904] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 15718289-5c19-4c2d-a9d8-d30ce0d63c68/15718289-5c19-4c2d-a9d8-d30ce0d63c68.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1966.468319] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-871eb910-a2df-43c9-be55-aef52b2859ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.474810] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.475224] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Instance network_info: |[{"id": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "address": "fa:16:3e:56:d7:78", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb939ba3-6c", "ovs_interfaceid": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1966.477218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:d7:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb939ba3-6c42-4855-80a4-e268dd0bbe54', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1966.486823] env[62405]: DEBUG oslo.service.loopingcall [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.487233] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1966.487233] env[62405]: value = "task-1947933" [ 1966.487233] env[62405]: _type = "Task" [ 1966.487233] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.487728] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1966.488436] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79a456d3-56a0-44c2-9df0-7942dd50a37f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.513913] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947933, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.515185] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1966.515185] env[62405]: value = "task-1947934" [ 1966.515185] env[62405]: _type = "Task" [ 1966.515185] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.522558] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.549540] env[62405]: DEBUG nova.compute.manager [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Received event network-vif-plugged-fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1966.549758] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.549989] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.550188] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.551066] env[62405]: DEBUG nova.compute.manager [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] No waiting events found dispatching network-vif-plugged-fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1966.551066] env[62405]: WARNING nova.compute.manager [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Received unexpected event network-vif-plugged-fb939ba3-6c42-4855-80a4-e268dd0bbe54 for instance with vm_state building and task_state spawning. [ 1966.551066] env[62405]: DEBUG nova.compute.manager [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Received event network-changed-fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1966.551066] env[62405]: DEBUG nova.compute.manager [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Refreshing instance network info cache due to event network-changed-fb939ba3-6c42-4855-80a4-e268dd0bbe54. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1966.551066] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Acquiring lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.551212] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Acquired lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.551335] env[62405]: DEBUG nova.network.neutron [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Refreshing network info cache for port fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1966.556057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.556057] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1966.560766] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.552s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.562370] env[62405]: DEBUG oslo_vmware.api [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947930, 'name': PowerOnVM_Task, 'duration_secs': 1.047021} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.562643] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1966.562833] env[62405]: INFO nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Took 9.61 seconds to spawn the instance on the hypervisor. 
[ 1966.563017] env[62405]: DEBUG nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1966.563814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98bcd22-2348-4a43-af01-6fbe07271837 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.781859] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.896737] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947932, 'name': ReconfigVM_Task, 'duration_secs': 0.366412} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.897233] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 1b820a12-4ca5-4b89-9016-81ebac4f1c3b/1b820a12-4ca5-4b89-9016-81ebac4f1c3b.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1966.899773] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fe459e5-ce9f-464d-b385-0d85341d9f0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.909389] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1966.909389] env[62405]: value = "task-1947935" [ 1966.909389] env[62405]: _type = "Task" [ 1966.909389] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.918701] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947935, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.998589] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947933, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.026247] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.061957] env[62405]: DEBUG nova.compute.utils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1967.063776] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1967.063776] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1967.070630] env[62405]: INFO nova.compute.claims [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1967.078044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Acquiring lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.078044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Acquired lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.078044] env[62405]: DEBUG nova.network.neutron [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1967.087506] env[62405]: INFO nova.compute.manager [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Took 31.32 seconds to build instance. 
[ 1967.146212] env[62405]: DEBUG nova.policy [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae6ca334510b4445a23dc2fb38215590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1a1645e38674042828c78155974f95e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1967.420599] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947935, 'name': Rename_Task, 'duration_secs': 0.293811} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.420925] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1967.421151] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21ede1d2-b3dd-4939-aad5-3c14e2f62cd3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.428051] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1967.428051] env[62405]: value = "task-1947936" [ 1967.428051] env[62405]: _type = "Task" [ 1967.428051] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.435798] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.441847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.441993] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.442157] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1967.443889] env[62405]: DEBUG nova.network.neutron [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updated VIF entry in instance network info cache for port fb939ba3-6c42-4855-80a4-e268dd0bbe54. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1967.444722] env[62405]: DEBUG nova.network.neutron [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updating instance_info_cache with network_info: [{"id": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "address": "fa:16:3e:56:d7:78", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb939ba3-6c", "ovs_interfaceid": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.505354] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947933, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708112} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1967.505680] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 15718289-5c19-4c2d-a9d8-d30ce0d63c68/15718289-5c19-4c2d-a9d8-d30ce0d63c68.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1967.506104] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1967.506194] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a01e9b04-f9ef-4155-8be1-3b2441235c47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.513581] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1967.513581] env[62405]: value = "task-1947937" [ 1967.513581] env[62405]: _type = "Task" [ 1967.513581] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.526897] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947937, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.531056] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Successfully created port: 75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1967.533282] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.568394] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1967.577062] env[62405]: INFO nova.compute.resource_tracker [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating resource usage from migration 215904f6-d5c8-46ab-a546-1d37550b6512 [ 1967.590612] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8675b39d-e822-4eea-a326-a3c4fcbbd9a2 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.829s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.923927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237bf102-7328-4dcf-b059-914cdf79d827 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.934830] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3279c8d7-8ecc-49f2-8d24-ef1bd14437ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.943798] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947936, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.971451] env[62405]: DEBUG oslo_concurrency.lockutils [req-19da10a5-9179-4a65-9c83-aa9919cd388f req-d1894d14-20a6-4400-8a0b-31f9ce05cdda service nova] Releasing lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.978036] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f5f5fb-70a4-4255-96b2-dc54f0ba7984 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.985625] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e606e82f-bc2f-49e3-809d-82f3fc61dbec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.005610] env[62405]: DEBUG nova.compute.provider_tree [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.029096] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.031789] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947937, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214727} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.032143] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1968.032909] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deddb137-df94-4339-8808-b54010aba2c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.056228] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 15718289-5c19-4c2d-a9d8-d30ce0d63c68/15718289-5c19-4c2d-a9d8-d30ce0d63c68.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1968.057217] env[62405]: DEBUG nova.network.neutron [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Updating instance_info_cache with network_info: [{"id": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "address": "fa:16:3e:1d:dd:20", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9835277c-1b", "ovs_interfaceid": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.058627] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef1b429e-aed4-4d29-ae3f-b34199c4b818 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.073692] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Releasing lock "refresh_cache-ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.073905] env[62405]: DEBUG nova.compute.manager [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Inject network info {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1968.074167] env[62405]: DEBUG nova.compute.manager [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] network_info to inject: |[{"id": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "address": "fa:16:3e:1d:dd:20", "network": {"id": "b8b3a499-0f88-47ce-900b-2cc51e1d123f", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-5251275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1d2ff9a8cb1840889a4a2a87c663f59e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6399297e-11b6-47b0-9a9f-712bb90b6ea1", "external-id": "nsx-vlan-transportzone-213", "segmentation_id": 213, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9835277c-1b", "ovs_interfaceid": "9835277c-1b66-4088-ab4a-9d2b8e7e6266", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1968.078814] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Reconfiguring VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1968.082643] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8da20e76-029f-4c52-9111-613882c86a4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.099563] env[62405]: DEBUG oslo_vmware.api [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Waiting for the task: (returnval){ [ 1968.099563] env[62405]: value = "task-1947939" [ 1968.099563] env[62405]: _type = "Task" [ 1968.099563] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.102605] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1968.102605] env[62405]: value = "task-1947938" [ 1968.102605] env[62405]: _type = "Task" [ 1968.102605] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.119688] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.119960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.121282] env[62405]: DEBUG oslo_vmware.api [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Task: {'id': task-1947939, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.121523] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947938, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.440558] env[62405]: DEBUG oslo_vmware.api [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947936, 'name': PowerOnVM_Task, 'duration_secs': 0.568952} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.440853] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1968.441098] env[62405]: INFO nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Took 9.11 seconds to spawn the instance on the hypervisor. 
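The PowerOnVM_Task sequence above (Invoking VirtualMachine.PowerOnVM_Task, then repeated _poll_task progress lines until the task "completed successfully") is oslo.vmware's invoke-then-wait pattern. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a hard-coded VM managed-object reference; the VMwareAPISession constructor arguments follow the oslo.vmware documentation rather than this deployment's configuration:

    # Illustrative sketch only: host, credentials and the VM moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A real caller would locate the VM via a PropertyCollector query; the
    # moref value here is made up for illustration.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task")
    # and returns a Task moref without blocking.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task, which is what produces the
    # "progress is 0% ... 94%" and "completed successfully" lines above.
    session.wait_for_task(task)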
[ 1968.441294] env[62405]: DEBUG nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1968.442099] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61241df-5ffe-4df0-b8e6-2a22c82ff4fd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.508856] env[62405]: DEBUG nova.scheduler.client.report [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1968.531384] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.597018] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1968.615812] env[62405]: DEBUG oslo_vmware.api [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] Task: {'id': task-1947939, 'name': ReconfigVM_Task, 'duration_secs': 0.174793} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.619776] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d8427b3-ab0a-4ad4-abc7-7fa0e6146ed4 tempest-ServersAdminTestJSON-138840338 tempest-ServersAdminTestJSON-138840338-project-admin] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Reconfigured VM instance to set the machine id {{(pid=62405) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1968.620793] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947938, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.622897] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1968.636635] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1968.636893] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1968.637065] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1968.637254] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1968.637518] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1968.637592] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1968.637779] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1968.638057] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1968.638275] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1968.638450] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1968.638701] env[62405]: DEBUG nova.virt.hardware [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1968.640961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41e5f17-2f5b-49ee-8da0-39d05027fbfe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.650081] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59066cb-53b5-48c4-bc89-fc52582884c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.705253] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updating instance_info_cache with network_info: [{"id": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "address": "fa:16:3e:12:b6:c9", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7c38983-3c", "ovs_interfaceid": "d7c38983-3ca5-4934-af4a-1bf5f845ec9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.960301] env[62405]: INFO nova.compute.manager [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Took 32.44 seconds to build instance. 
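The nova.virt.hardware lines above take a 1-vCPU flavor with no explicit CPU topology preferences (flavor and image limits all 0, fallback limits of 65536 sockets/cores/threads) and arrive at the single possible topology of 1 socket, 1 core, 1 thread. A simplified re-derivation of that enumeration step, not Nova's actual implementation (which lives in nova/virt/hardware.py and applies many more constraints):

    # Simplified illustration of the topology search logged above.
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorisations that use every vCPU."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(Topology(sockets, cores, threads))
        return found

    # With vcpus=1 (the m1.nano flavor in this run) only one factorisation exists,
    # matching "Got 1 possible topologies" and VirtCPUTopology(cores=1,sockets=1,threads=1).
    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]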
[ 1969.013946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.453s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.014188] env[62405]: INFO nova.compute.manager [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Migrating [ 1969.020744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.980s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.020980] env[62405]: DEBUG nova.objects.instance [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'pci_requests' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.042058] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.119035] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947938, 'name': ReconfigVM_Task, 'duration_secs': 0.616423} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.119035] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 15718289-5c19-4c2d-a9d8-d30ce0d63c68/15718289-5c19-4c2d-a9d8-d30ce0d63c68.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1969.119035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-145b21fa-d710-4b1f-b16d-b148d03389a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.124921] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1969.124921] env[62405]: value = "task-1947940" [ 1969.124921] env[62405]: _type = "Task" [ 1969.124921] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.136152] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947940, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.154553] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.209404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-4d59d9fd-23df-4933-97ed-32602e51e9aa" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.209404] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 1969.209404] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.209404] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.209404] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.209672] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.210266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.210266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.210266] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 1969.210266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.453500] env[62405]: INFO nova.compute.manager [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Rebuilding instance [ 1969.463213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-45468540-6450-457e-9233-29b74278385f tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.948s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.495742] env[62405]: DEBUG nova.compute.manager [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1969.496636] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885c24e9-c872-4cd2-8604-b9b89dae1fe5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.500822] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.501114] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.501338] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.501541] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.501712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.506790] env[62405]: INFO nova.compute.manager [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Terminating instance [ 1969.533156] env[62405]: DEBUG nova.objects.instance [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'numa_topology' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.534216] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.536532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.536703] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.536932] env[62405]: DEBUG nova.network.neutron [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1969.579438] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.579692] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.579882] env[62405]: DEBUG nova.compute.manager 
[None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1969.581220] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f71217-c22b-4f44-8773-ecf89facdf4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.587583] env[62405]: DEBUG nova.compute.manager [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1969.588145] env[62405]: DEBUG nova.objects.instance [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'flavor' on Instance uuid b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.634249] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947940, 'name': Rename_Task, 'duration_secs': 0.386003} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.634516] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1969.634754] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d55bb83a-ef32-430f-af7e-facd873d175b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.641049] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1969.641049] env[62405]: value = "task-1947941" [ 1969.641049] env[62405]: _type = "Task" [ 1969.641049] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.648241] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947941, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.713215] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1970.011364] env[62405]: DEBUG nova.compute.manager [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1970.011612] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1970.012630] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8253bb94-dea1-4244-9f07-465a3448416c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.020659] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1970.020885] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dad9d971-497c-4a2d-8079-ac7432793c4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.027700] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1970.027700] env[62405]: value = "task-1947942" [ 1970.027700] env[62405]: _type = "Task" [ 1970.027700] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.033803] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.035649] env[62405]: INFO nova.compute.claims [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1970.043155] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947942, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.155420] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947941, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.259876] env[62405]: DEBUG nova.network.neutron [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.513663] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1970.514024] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a15e5a8-5e9a-46b1-bd67-0af1f704e3ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.521862] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1970.521862] env[62405]: value = "task-1947943" [ 1970.521862] env[62405]: _type = "Task" [ 1970.521862] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.536908] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947943, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.541517] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.549450] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947942, 'name': PowerOffVM_Task, 'duration_secs': 0.189722} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.549719] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1970.551776] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1970.551776] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c885059b-0d31-471a-bdba-76792c76727f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.595131] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1970.595752] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6404af0d-7270-4ac3-848d-2b5ff23d37fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.602956] env[62405]: DEBUG oslo_vmware.api [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1970.602956] env[62405]: value = "task-1947945" [ 1970.602956] env[62405]: _type = "Task" [ 1970.602956] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.611498] env[62405]: DEBUG oslo_vmware.api [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.651987] env[62405]: DEBUG oslo_vmware.api [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947941, 'name': PowerOnVM_Task, 'duration_secs': 0.683493} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.652175] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1970.652342] env[62405]: INFO nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Took 8.85 seconds to spawn the instance on the hypervisor. [ 1970.652527] env[62405]: DEBUG nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1970.653359] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a4bbe-6401-43f7-b3a2-6f5c047ba7c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.763120] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.035398] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947943, 'name': PowerOffVM_Task, 'duration_secs': 0.200549} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.038454] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1971.038739] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1971.038969] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.039637] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0ae0f9-d06b-41e5-9662-4b7f63990791 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.045685] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1971.045900] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40fce4af-def1-421b-9569-690de550d1d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.113411] env[62405]: DEBUG oslo_vmware.api [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947945, 'name': PowerOffVM_Task, 'duration_secs': 0.224489} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.113755] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1971.113999] env[62405]: DEBUG nova.compute.manager [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1971.114793] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285fc13b-e780-49b8-bbc8-3003e4d0c178 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.173642] env[62405]: INFO nova.compute.manager [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Took 21.70 seconds to build instance. 
[ 1971.326619] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd5ec93-f8cd-4cba-a74c-7a9c52f5fcaf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.334383] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d24b15-81a5-4b90-af23-716d619a8e5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.364325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5e1ae5-7c79-4874-b0d0-8a8720d0e1da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.371278] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a0cbc1-571a-4395-8f0b-ec70ffb54f48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.383785] env[62405]: DEBUG nova.compute.provider_tree [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1971.537766] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.627132] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8cd04a83-6bc6-4493-91b4-b1504d0994e6 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.675335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-50ab28ce-e0cd-4d2e-99f1-74239e46f3cb tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.208s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.742733] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1971.742993] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1971.743207] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 
tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1971.743521] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0a81de4-fd4c-4f52-8ca7-f44f312e9831 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.751047] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1971.751047] env[62405]: value = "task-1947947" [ 1971.751047] env[62405]: _type = "Task" [ 1971.751047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.759793] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1971.760045] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1971.760251] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleting the datastore file [datastore1] 1b820a12-4ca5-4b89-9016-81ebac4f1c3b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1971.760825] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e390e81-db6d-41c7-90fa-ebc6e49367c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.768033] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947947, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.772583] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for the task: (returnval){ [ 1971.772583] env[62405]: value = "task-1947948" [ 1971.772583] env[62405]: _type = "Task" [ 1971.772583] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.785681] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.887520] env[62405]: DEBUG nova.scheduler.client.report [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1972.038020] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947934, 'name': CreateVM_Task, 'duration_secs': 5.499273} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.038210] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1972.038957] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.039139] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.039466] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1972.039760] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-895c0680-ac2d-40fc-a3d6-8fe599925a51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.044355] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1972.044355] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc6922-385c-6682-6a99-f2af5782f8f9" [ 1972.044355] env[62405]: _type = "Task" [ 1972.044355] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.052434] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc6922-385c-6682-6a99-f2af5782f8f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.261971] env[62405]: DEBUG nova.objects.instance [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'flavor' on Instance uuid b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.263300] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947947, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155548} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.263761] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1972.263965] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1972.264160] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1972.281142] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff80aa07-7b3e-41ef-a1f6-c7f84a2ed697 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.290317] env[62405]: DEBUG oslo_vmware.api [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Task: {'id': task-1947948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150334} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.304040] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1972.304283] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1972.304464] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1972.304639] env[62405]: INFO nova.compute.manager [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Took 2.29 seconds to destroy the instance on the hypervisor. [ 1972.304882] env[62405]: DEBUG oslo.service.loopingcall [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1972.305541] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1972.309136] env[62405]: DEBUG nova.compute.manager [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1972.309299] env[62405]: DEBUG nova.network.neutron [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1972.392910] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.372s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.395250] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.983s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.395454] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.397325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.522s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.397562] env[62405]: DEBUG nova.objects.instance [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lazy-loading 'resources' on Instance uuid 1f8293f9-5fba-4bf4-bf7c-65837c1092a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.430897] env[62405]: INFO nova.scheduler.client.report [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocations for instance 81aebf11-5d80-4a86-b232-3ecc5f3892c2 [ 1972.470118] env[62405]: INFO nova.network.neutron [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 
tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating port dba92750-bf41-4683-b71d-128391ff29d0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1972.557074] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bc6922-385c-6682-6a99-f2af5782f8f9, 'name': SearchDatastore_Task, 'duration_secs': 0.008961} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.557491] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.557845] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1972.557913] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.560752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.560752] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1972.560752] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2be6217-0c21-42cb-b5b9-82349e3907dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.570024] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1972.570024] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 
tempest-AttachVolumeNegativeTest-1699536865-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1972.570024] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb9b3de-a6d6-46e1-8b5f-8ab585ceb5eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.575966] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1972.575966] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a6d33f-eba0-0c66-a242-374f2392a632" [ 1972.575966] env[62405]: _type = "Task" [ 1972.575966] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.587743] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a6d33f-eba0-0c66-a242-374f2392a632, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.649302] env[62405]: DEBUG nova.compute.manager [req-cffc15c8-f109-4122-a15a-8276d2a78119 req-f0287c0e-7d00-443d-a017-fa8b9ca30a9f service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Received event network-vif-deleted-651237ae-dab1-45f6-9177-7e1f6cda628b {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1972.649302] env[62405]: INFO nova.compute.manager [req-cffc15c8-f109-4122-a15a-8276d2a78119 req-f0287c0e-7d00-443d-a017-fa8b9ca30a9f service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Neutron deleted interface 651237ae-dab1-45f6-9177-7e1f6cda628b; detaching it from the instance and deleting it from the info cache [ 1972.649302] env[62405]: DEBUG nova.network.neutron [req-cffc15c8-f109-4122-a15a-8276d2a78119 req-f0287c0e-7d00-443d-a017-fa8b9ca30a9f service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.771686] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.772236] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.773079] env[62405]: DEBUG nova.network.neutron [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance 
{{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1972.773079] env[62405]: DEBUG nova.objects.instance [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'info_cache' on Instance uuid b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.815022] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1972.815022] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc3e432a-215d-46a8-a18e-31c074be8aad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.825139] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1972.825139] env[62405]: value = "task-1947949" [ 1972.825139] env[62405]: _type = "Task" [ 1972.825139] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.836093] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947949, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.853950] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Successfully updated port: 75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1972.942123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-46193ebe-2a0d-417d-972c-1ca9633cec16 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "81aebf11-5d80-4a86-b232-3ecc5f3892c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.555s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.094657] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a6d33f-eba0-0c66-a242-374f2392a632, 'name': SearchDatastore_Task, 'duration_secs': 0.018264} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.097968] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b282cec-a294-4f30-945b-98d1c2db7909 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.104464] env[62405]: DEBUG nova.network.neutron [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.112533] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1973.112533] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52148cbb-9f6f-71a2-9d7a-1bfda2f0fb6a" [ 1973.112533] env[62405]: _type = "Task" [ 1973.112533] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.122326] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52148cbb-9f6f-71a2-9d7a-1bfda2f0fb6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.152794] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea9c8b3f-dde4-4cab-bf4c-61d0694f0e7c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.165091] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914acf07-c35b-4f9f-a76e-ab8a87e5d620 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.202477] env[62405]: DEBUG nova.compute.manager [req-cffc15c8-f109-4122-a15a-8276d2a78119 req-f0287c0e-7d00-443d-a017-fa8b9ca30a9f service nova] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Detach interface failed, port_id=651237ae-dab1-45f6-9177-7e1f6cda628b, reason: Instance 1b820a12-4ca5-4b89-9016-81ebac4f1c3b could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 1973.277985] env[62405]: DEBUG nova.objects.base [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1973.298888] env[62405]: DEBUG nova.compute.manager [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1973.298888] env[62405]: DEBUG nova.compute.manager [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1973.298888] env[62405]: DEBUG oslo_concurrency.lockutils [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.298888] env[62405]: DEBUG oslo_concurrency.lockutils [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.298888] env[62405]: DEBUG nova.network.neutron [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1973.313446] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb897b1e-dfe9-4f81-95b2-62c016a8ff8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.318943] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1973.319198] 
env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1973.319370] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1973.319585] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1973.319735] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1973.319896] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1973.320119] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1973.320340] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1973.320457] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1973.320622] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1973.320799] env[62405]: DEBUG nova.virt.hardware [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1973.322081] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4ce60d-203a-4258-9d92-3c15a64ecbea {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.333477] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7046ac-5637-4193-86e0-b8f0eace6ec6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.340679] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20248645-e9a1-4175-ab4f-4723fca9c7c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.344681] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947949, 'name': PowerOffVM_Task, 'duration_secs': 0.212373} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.345335] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1973.345527] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1973.372573] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.372746] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.372904] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.383079] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7478104-8f2c-458e-866a-f82c01a0077d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.386444] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:57:a9', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04edbefd-e96c-47d6-bfd7-72fb2a759156', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1973.399186] env[62405]: DEBUG oslo.service.loopingcall [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1973.399186] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1973.399186] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8c4ade9-59e7-4c0b-bd7f-76414190a4fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.416039] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e1667d-46e8-4edf-b8bb-12eef9494761 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.420986] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1973.420986] env[62405]: value = "task-1947950" [ 1973.420986] env[62405]: _type = "Task" [ 1973.420986] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.432702] env[62405]: DEBUG nova.compute.provider_tree [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1973.438957] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947950, 'name': CreateVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.611204] env[62405]: INFO nova.compute.manager [-] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Took 1.30 seconds to deallocate network for instance. [ 1973.626060] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52148cbb-9f6f-71a2-9d7a-1bfda2f0fb6a, 'name': SearchDatastore_Task, 'duration_secs': 0.009291} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.626585] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.626585] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 14512ed2-9eae-4753-b83c-8c0d0d5d9432/14512ed2-9eae-4753-b83c-8c0d0d5d9432.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1973.626852] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf496f50-009f-45d5-ad1f-940c49033b91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.633403] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1973.633403] env[62405]: value = "task-1947951" [ 1973.633403] env[62405]: _type = "Task" [ 1973.633403] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.641263] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.877785] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1973.878119] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1973.878262] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1973.878486] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1973.878715] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1973.878883] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1973.879289] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1973.879611] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1973.879755] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible 
topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1973.879995] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1973.880256] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1973.890251] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4136a5c-d6f1-4e68-850a-1572c2b2c44b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.909637] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1973.909637] env[62405]: value = "task-1947952" [ 1973.909637] env[62405]: _type = "Task" [ 1973.909637] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.918424] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947952, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.933767] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947950, 'name': CreateVM_Task, 'duration_secs': 0.353441} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.934014] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1973.934957] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.935271] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.935798] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1973.936959] env[62405]: DEBUG nova.scheduler.client.report [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1973.940683] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02006866-9049-46c1-8ac0-29af9cd87540 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.948336] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1973.948336] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228454d-a690-863e-a64b-752ec5345a68" [ 1973.948336] env[62405]: _type = "Task" [ 1973.948336] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.957239] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228454d-a690-863e-a64b-752ec5345a68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.968893] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1974.121519] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.146686] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947951, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.232051] env[62405]: DEBUG nova.network.neutron [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.238449] env[62405]: DEBUG nova.network.neutron [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1974.238449] env[62405]: DEBUG nova.network.neutron [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.242330] env[62405]: DEBUG nova.network.neutron [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Updating instance_info_cache with network_info: [{"id": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "address": "fa:16:3e:04:47:1b", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ce34f4-4d", "ovs_interfaceid": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.422875] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947952, 'name': ReconfigVM_Task, 'duration_secs': 0.475314} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.430472] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1974.444474] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.047s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.446780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.665s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.448342] env[62405]: INFO nova.compute.claims [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1974.462852] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5228454d-a690-863e-a64b-752ec5345a68, 'name': SearchDatastore_Task, 'duration_secs': 0.04775} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.463194] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.463440] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1974.463727] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.464015] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.464317] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1974.464414] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1618f0e0-b45a-4336-a1a5-87c3e2a40ad3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.474626] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1974.474843] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1974.475598] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dd7df35-42e7-4664-856d-df48671e4f60 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.481711] env[62405]: INFO nova.scheduler.client.report [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Deleted allocations for instance 1f8293f9-5fba-4bf4-bf7c-65837c1092a0 [ 1974.489063] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1974.489063] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522928ca-bd8f-f5ae-ecc2-b44025888ded" [ 1974.489063] env[62405]: _type = "Task" [ 1974.489063] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.497656] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522928ca-bd8f-f5ae-ecc2-b44025888ded, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.646014] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518505} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.646493] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 14512ed2-9eae-4753-b83c-8c0d0d5d9432/14512ed2-9eae-4753-b83c-8c0d0d5d9432.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1974.646833] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1974.646833] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be72b6fb-cbbb-4df0-912d-fbb6f75ef2e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.654721] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1974.654721] env[62405]: value = "task-1947953" [ 1974.654721] env[62405]: _type = "Task" [ 1974.654721] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.666580] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947953, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.684802] env[62405]: DEBUG nova.compute.manager [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Received event network-vif-plugged-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1974.684998] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Acquiring lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.685229] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.685406] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.685576] env[62405]: DEBUG nova.compute.manager [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] No waiting events found dispatching network-vif-plugged-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1974.685744] env[62405]: WARNING nova.compute.manager [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Received unexpected event network-vif-plugged-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc for instance with vm_state building and task_state spawning. [ 1974.685901] env[62405]: DEBUG nova.compute.manager [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Received event network-changed-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1974.686067] env[62405]: DEBUG nova.compute.manager [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Refreshing instance network info cache due to event network-changed-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1974.686237] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Acquiring lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.739489] env[62405]: DEBUG oslo_concurrency.lockutils [req-68d00954-2a61-49ad-920a-bff900ae1b4d req-07e31531-58e7-4a88-92fd-fb5585d2555f service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.740179] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.744279] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.744564] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Instance network_info: |[{"id": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "address": "fa:16:3e:04:47:1b", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ce34f4-4d", "ovs_interfaceid": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1974.744990] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Acquired lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.745413] env[62405]: DEBUG nova.network.neutron [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: 
d937c90c-10b2-4c57-b1db-7b433c3d9017] Refreshing network info cache for port 75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1974.746406] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:47:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1974.753887] env[62405]: DEBUG oslo.service.loopingcall [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.755054] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1974.755768] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20f55669-c736-4ea5-bd0b-d0e64a0e5bc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.775646] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1974.775646] env[62405]: value = "task-1947954" [ 1974.775646] env[62405]: _type = "Task" [ 1974.775646] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.783802] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947954, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.937178] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1974.937445] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1974.937607] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1974.937793] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1974.937942] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1974.938392] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1974.938674] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1974.938851] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1974.939050] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Got 1 possible 
topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1974.939233] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1974.939653] env[62405]: DEBUG nova.virt.hardware [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1974.946352] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1974.946694] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5ab30d1-5fac-4bd3-8220-9028c5aca93d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.970103] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1974.970103] env[62405]: value = "task-1947955" [ 1974.970103] env[62405]: _type = "Task" [ 1974.970103] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.980080] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947955, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.994035] env[62405]: DEBUG oslo_concurrency.lockutils [None req-958236ab-98a7-438b-8acf-f1d28209a6b1 tempest-InstanceActionsNegativeTestJSON-963095504 tempest-InstanceActionsNegativeTestJSON-963095504-project-member] Lock "1f8293f9-5fba-4bf4-bf7c-65837c1092a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.834s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.001553] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522928ca-bd8f-f5ae-ecc2-b44025888ded, 'name': SearchDatastore_Task, 'duration_secs': 0.022245} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.001953] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1db0196d-dce6-47f5-84b3-8ffac9c591d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.007712] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1975.007712] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527eb21b-4908-6273-ef82-111c2b1d78e4" [ 1975.007712] env[62405]: _type = "Task" [ 1975.007712] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.015674] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527eb21b-4908-6273-ef82-111c2b1d78e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.104135] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.104227] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.104384] env[62405]: DEBUG nova.network.neutron [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1975.166520] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070912} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.170572] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1975.170572] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a536a4ef-9f8f-46ac-bfb4-f8bd4e7c6008 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.196971] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 14512ed2-9eae-4753-b83c-8c0d0d5d9432/14512ed2-9eae-4753-b83c-8c0d0d5d9432.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1975.197374] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9eaf3653-faa7-4238-aa28-9aead5d6632e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.226474] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1975.226474] env[62405]: value = "task-1947956" [ 1975.226474] env[62405]: _type = "Task" [ 1975.226474] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.236446] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947956, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.285637] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947954, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.332566] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1975.332789] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing instance network info cache due to event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1975.333032] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.333190] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.333358] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1975.485404] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947955, 'name': ReconfigVM_Task, 'duration_secs': 0.418105} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.485676] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1975.486687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfdcbbf-22ad-4ad8-97e0-8cd2d3b20c33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.511326] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1975.514503] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0c71853-97e8-4065-b441-de7702980690 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.539272] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527eb21b-4908-6273-ef82-111c2b1d78e4, 'name': SearchDatastore_Task, 'duration_secs': 0.034928} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.540689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.540955] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1975.541313] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1975.541313] env[62405]: value = "task-1947957" [ 1975.541313] env[62405]: _type = "Task" [ 1975.541313] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.541519] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-148f5bc1-2e27-410f-a100-9b32c8c2fb5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.555091] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947957, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.556520] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1975.556520] env[62405]: value = "task-1947958" [ 1975.556520] env[62405]: _type = "Task" [ 1975.556520] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.566744] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947958, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.599706] env[62405]: DEBUG nova.network.neutron [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Updated VIF entry in instance network info cache for port 75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1975.600487] env[62405]: DEBUG nova.network.neutron [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Updating instance_info_cache with network_info: [{"id": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "address": "fa:16:3e:04:47:1b", "network": {"id": "845c4582-a0c8-4565-8025-5cc0fd22ff9a", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1066509768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1a1645e38674042828c78155974f95e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ce34f4-4d", "ovs_interfaceid": "75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.738273] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947956, 'name': ReconfigVM_Task, 'duration_secs': 0.360898} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.739473] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 14512ed2-9eae-4753-b83c-8c0d0d5d9432/14512ed2-9eae-4753-b83c-8c0d0d5d9432.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1975.739473] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8eb51afa-1a0a-4d1c-a0aa-fcf1e9ee8103 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.745159] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1975.746633] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f24fff33-dae0-4dfc-8ed9-edd71f662db2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.748258] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1975.748258] env[62405]: value = "task-1947959" [ 1975.748258] env[62405]: _type = "Task" [ 1975.748258] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.753812] env[62405]: DEBUG oslo_vmware.api [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 1975.753812] env[62405]: value = "task-1947960" [ 1975.753812] env[62405]: _type = "Task" [ 1975.753812] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.762322] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947959, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.767417] env[62405]: DEBUG oslo_vmware.api [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947960, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.789457] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947954, 'name': CreateVM_Task, 'duration_secs': 0.650025} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.789457] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1975.789740] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.789908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.790335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.790635] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d177dd7-7daa-4b33-aff7-9eb32b663449 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.795623] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1975.795623] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fe4f10-4e32-c0b8-628b-847a520de94a" [ 1975.795623] env[62405]: _type = "Task" [ 1975.795623] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.809156] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fe4f10-4e32-c0b8-628b-847a520de94a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.880814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d622a682-7587-4872-a5f2-ae70ff099a6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.892268] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1fa1e9-685e-493d-a4cb-1f1de552c4ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.939027] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081b2698-640a-4232-babc-93355e0ce0d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.951574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc162cf-4562-4ed1-b3d6-ea511fa56d1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.975289] env[62405]: DEBUG nova.compute.provider_tree [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1976.055532] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947957, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.056742] env[62405]: DEBUG nova.network.neutron [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.070113] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947958, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.103457] env[62405]: DEBUG oslo_concurrency.lockutils [req-26c138d5-8223-491a-956e-a912e6cdede4 req-49c45db5-d156-4612-8415-475d25a49027 service nova] Releasing lock "refresh_cache-d937c90c-10b2-4c57-b1db-7b433c3d9017" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.259386] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947959, 'name': Rename_Task, 'duration_secs': 0.173948} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.260221] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updated VIF entry in instance network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1976.260628] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.266162] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1976.266162] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5191a0f6-b170-40c8-967d-d2271f145017 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.273800] env[62405]: DEBUG oslo_vmware.api [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1947960, 'name': PowerOnVM_Task, 'duration_secs': 0.470677} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.275020] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1976.275196] env[62405]: DEBUG nova.compute.manager [None req-c141bf50-eff7-4504-b92e-cc8f3c0322c4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1976.275547] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 1976.275547] env[62405]: value = "task-1947961" [ 1976.275547] env[62405]: _type = "Task" [ 1976.275547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.276178] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc62a200-4fba-41aa-bcf0-2d86541bc8c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.290633] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947961, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.305323] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fe4f10-4e32-c0b8-628b-847a520de94a, 'name': SearchDatastore_Task, 'duration_secs': 0.072586} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.305672] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.305920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.306175] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.306341] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.306542] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.306823] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53ceccb1-7293-45cd-89f0-c1f3d3c55a0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.316398] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.316657] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1976.317744] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad68c72-53e3-458f-8fa4-9d507c8ca712 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.323754] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1976.323754] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52743ef4-5005-8aec-ea17-ccb7734a4551" [ 1976.323754] env[62405]: _type = "Task" [ 1976.323754] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.332608] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52743ef4-5005-8aec-ea17-ccb7734a4551, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.478708] env[62405]: DEBUG nova.scheduler.client.report [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1976.556334] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947957, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.564645] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.577288] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947958, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566124} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.577288] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1976.577288] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1976.577288] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b0b22f2-b6f9-4924-a877-7fdd27438ace {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.583202] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1976.583202] env[62405]: value = "task-1947962" [ 1976.583202] env[62405]: _type = "Task" [ 1976.583202] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.593117] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947962, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.603206] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e1bf35bb227d33fb9c892b6056c24119',container_format='bare',created_at=2024-12-21T03:28:24Z,direct_url=,disk_format='vmdk',id=6ee02a73-5729-47e1-93a1-23fefdcafc1e,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-947290172-shelved',owner='521150d8f23f4f76a0c785481c99e897',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-12-21T03:28:41Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1976.603466] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1976.603623] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1976.604583] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1976.604583] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1976.604583] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1976.604583] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1976.604583] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1976.604794] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1976.604794] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1976.605380] env[62405]: DEBUG nova.virt.hardware [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1976.605799] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa169d20-9dc9-4d9f-bc26-acb0db48a3c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.614640] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c973c5-e9c1-423d-925b-3b063a414cd3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.630129] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:78:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dba92750-bf41-4683-b71d-128391ff29d0', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1976.636862] env[62405]: DEBUG oslo.service.loopingcall [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1976.639106] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1976.639106] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15e9eb49-fc7e-4735-9f9a-5c33359986d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.657834] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1976.657834] env[62405]: value = "task-1947963" [ 1976.657834] env[62405]: _type = "Task" [ 1976.657834] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.669648] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947963, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.763406] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.763714] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1976.763889] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.764127] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.764297] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.764509] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] No waiting events found dispatching network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1976.764639] env[62405]: WARNING nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received unexpected event network-vif-plugged-dba92750-bf41-4683-b71d-128391ff29d0 for instance with vm_state shelved_offloaded and task_state spawning. [ 1976.764808] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-changed-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1976.764963] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing instance network info cache due to event network-changed-dba92750-bf41-4683-b71d-128391ff29d0. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1976.765167] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.765306] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.765464] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Refreshing network info cache for port dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1976.788297] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947961, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.834121] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52743ef4-5005-8aec-ea17-ccb7734a4551, 'name': SearchDatastore_Task, 'duration_secs': 0.009812} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.835410] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-726fc290-25d0-470b-9cff-9daa17d2843e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.841852] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1976.841852] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ad1e79-127a-331c-83b4-5d7bddb203c1" [ 1976.841852] env[62405]: _type = "Task" [ 1976.841852] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.850835] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ad1e79-127a-331c-83b4-5d7bddb203c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.951064] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.951064] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.951064] env[62405]: INFO nova.compute.manager [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Shelving [ 1976.983871] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1976.984402] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1976.986869] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.832s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.988791] env[62405]: INFO nova.compute.claims [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1977.055670] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947957, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.092763] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077059} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.093082] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1977.093847] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a095c5-3863-4a24-aefd-c535c02f7593 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.117236] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1977.117544] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9982e0eb-b472-4389-9032-936b95335107 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.139142] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1977.139142] env[62405]: value = "task-1947964" [ 1977.139142] env[62405]: _type = "Task" [ 1977.139142] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.148863] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947964, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.168611] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947963, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.289396] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947961, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.353785] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ad1e79-127a-331c-83b4-5d7bddb203c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.354080] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.354376] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d937c90c-10b2-4c57-b1db-7b433c3d9017/d937c90c-10b2-4c57-b1db-7b433c3d9017.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1977.354653] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50c38fb7-a8dc-4ad7-9317-1dd77ad1fdc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.361706] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1977.361706] env[62405]: value = "task-1947965" [ 1977.361706] env[62405]: _type = "Task" [ 1977.361706] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.370104] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.493189] env[62405]: DEBUG nova.compute.utils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1977.497867] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1977.497867] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1977.556638] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947957, 'name': ReconfigVM_Task, 'duration_secs': 1.662336} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.560041] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Reconfigured VM instance instance-00000062 to attach disk [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab/ec0a05fc-4a11-4e07-a03c-e357a7a750ab.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1977.560465] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1977.648900] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947964, 'name': ReconfigVM_Task, 'duration_secs': 0.404141} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.648900] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1977.649471] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de1e1e65-260e-47f5-b778-a8cfcab6da2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.657436] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1977.657436] env[62405]: value = "task-1947966" [ 1977.657436] env[62405]: _type = "Task" [ 1977.657436] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.669838] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947966, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.673092] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947963, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.790201] env[62405]: DEBUG oslo_vmware.api [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1947961, 'name': PowerOnVM_Task, 'duration_secs': 1.037991} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.790643] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1977.790734] env[62405]: INFO nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Took 13.57 seconds to spawn the instance on the hypervisor. [ 1977.790972] env[62405]: DEBUG nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1977.791833] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d7e843-f954-4d42-8567-e767133f81c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.875842] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947965, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.895750] env[62405]: DEBUG nova.policy [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1977.959799] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1977.960179] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6f846af-2117-49a2-9a9e-25d84d235428 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.968798] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1977.968798] env[62405]: value = "task-1947967" [ 1977.968798] env[62405]: _type = "Task" [ 1977.968798] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.982309] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.005068] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1978.068083] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e6087d-dbf4-4da9-8628-7716b682e2db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.096451] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70296fa4-351f-4e91-8652-06ccac1620fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.120640] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1978.172294] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947966, 'name': Rename_Task, 'duration_secs': 0.407097} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.172702] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1978.173034] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11165797-5511-4644-b369-605608c8e394 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.178037] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947963, 'name': CreateVM_Task, 'duration_secs': 1.386316} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.179341] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1978.180166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.180379] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.180808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1978.184205] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8be8d0f-9a13-4118-8b83-f4c884380523 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.188291] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1978.188291] env[62405]: value = "task-1947968" [ 1978.188291] env[62405]: _type = "Task" [ 1978.188291] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.193535] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1978.193535] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c557cf-fc12-f658-5866-77eaaf3c3180" [ 1978.193535] env[62405]: _type = "Task" [ 1978.193535] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.207657] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947968, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.218915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.218915] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Processing image 6ee02a73-5729-47e1-93a1-23fefdcafc1e {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1978.218915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.218915] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.218915] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1978.218915] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0541d00-9c98-4160-9120-15122e7ee37f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.231681] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1978.231945] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1978.232747] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff8fdb73-9a22-4d0f-bb3a-fac68617c534 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.239367] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1978.239367] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5213c42a-9e33-54bd-8f22-9264a2750ace" [ 1978.239367] env[62405]: _type = "Task" [ 1978.239367] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.251080] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5213c42a-9e33-54bd-8f22-9264a2750ace, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.252417] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updated VIF entry in instance network info cache for port dba92750-bf41-4683-b71d-128391ff29d0. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1978.252810] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.318251] env[62405]: INFO nova.compute.manager [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Took 27.68 seconds to build instance. 
[ 1978.373025] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631384} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.373271] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] d937c90c-10b2-4c57-b1db-7b433c3d9017/d937c90c-10b2-4c57-b1db-7b433c3d9017.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1978.373485] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1978.376946] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc2b4311-f9bb-41da-80fd-c0c49aed7971 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.386141] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1978.386141] env[62405]: value = "task-1947969" [ 1978.386141] env[62405]: _type = "Task" [ 1978.386141] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.394167] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947969, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.410465] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d70c5b-8af5-45d7-aeb9-5f3a3d9e533a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.419460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17f3e12-4f2a-4a32-8e35-591ad8ecc972 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.452094] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66af3872-6bcb-4f12-95ff-68f0f4854f51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.460656] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7da207-6016-41d4-8891-12573fe10166 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.477216] env[62405]: DEBUG nova.compute.provider_tree [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1978.487843] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947967, 'name': PowerOffVM_Task, 'duration_secs': 0.288683} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.488772] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1978.489607] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9f7d55-065f-420b-8c2c-bb05cca00dc4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.509228] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2276b804-4bd3-4b28-b61e-f3cda445afd5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.617537] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Successfully created port: 0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1978.699627] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947968, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.752141] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1978.752416] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Fetch image to [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3/OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1978.752605] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Downloading stream optimized image 6ee02a73-5729-47e1-93a1-23fefdcafc1e to [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3/OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3.vmdk on the data store datastore1 as vApp {{(pid=62405) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1978.752769] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Downloading image file data 6ee02a73-5729-47e1-93a1-23fefdcafc1e to the ESX as VM named 
'OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3' {{(pid=62405) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1978.755194] env[62405]: DEBUG nova.network.neutron [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Port 1336ca88-2020-4b2c-b082-e45e1fe68506 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1978.756829] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.757070] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1978.757250] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing instance network info cache due to event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1978.757456] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.757600] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.757762] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1978.821390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e17189c0-7a7b-42cd-8565-a061e0ff375e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.203s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.855855] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1978.855855] env[62405]: value = "resgroup-9" [ 1978.855855] env[62405]: _type = "ResourcePool" [ 1978.855855] env[62405]: 
}. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1978.857532] env[62405]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cacca4da-58f9-41c4-bd61-c621e11e038c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.896505] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lease: (returnval){ [ 1978.896505] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1978.896505] env[62405]: _type = "HttpNfcLease" [ 1978.896505] env[62405]: } obtained for vApp import into resource pool (val){ [ 1978.896505] env[62405]: value = "resgroup-9" [ 1978.896505] env[62405]: _type = "ResourcePool" [ 1978.896505] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1978.896761] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the lease: (returnval){ [ 1978.896761] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1978.896761] env[62405]: _type = "HttpNfcLease" [ 1978.896761] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1978.903095] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100131} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.903737] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1978.904503] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2492a6-2641-40d5-9548-7361d65877a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.908289] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1978.908289] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1978.908289] env[62405]: _type = "HttpNfcLease" [ 1978.908289] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1978.928359] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d937c90c-10b2-4c57-b1db-7b433c3d9017/d937c90c-10b2-4c57-b1db-7b433c3d9017.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.928696] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69f853f2-39ce-401e-a94a-b1e2cc296daf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.948018] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1978.948018] env[62405]: value = "task-1947971" [ 1978.948018] env[62405]: _type = "Task" [ 1978.948018] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.958222] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947971, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.009502] env[62405]: ERROR nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [req-a42149c6-f783-467b-9d4c-3916463298e6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a42149c6-f783-467b-9d4c-3916463298e6"}]} [ 1979.019538] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1979.023106] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1979.023453] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ca2607e5-bfec-4930-bcfc-cf5257cf3034 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.034890] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1979.034890] env[62405]: value = "task-1947972" [ 1979.034890] env[62405]: _type = "Task" [ 1979.034890] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.039641] env[62405]: DEBUG nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1979.047286] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947972, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.058456] env[62405]: DEBUG nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1979.058809] env[62405]: DEBUG nova.compute.provider_tree [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1979.064393] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1979.064798] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1979.065034] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1979.065320] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1979.065566] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1979.065823] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1979.066116] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1979.066338] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1979.066527] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1979.066744] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1979.067016] env[62405]: DEBUG nova.virt.hardware [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1979.068278] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8344c64-8b7f-457e-a09e-ee0e87125363 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.071733] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.072050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1979.079019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b03804c-7294-4d57-a412-aecac12c0112 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.085982] env[62405]: DEBUG nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1979.105019] env[62405]: DEBUG nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1979.202434] env[62405]: DEBUG oslo_vmware.api [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947968, 'name': PowerOnVM_Task, 'duration_secs': 0.5705} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.202759] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1979.203018] env[62405]: DEBUG nova.compute.manager [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1979.203896] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1301d429-12e8-4769-876c-bbd33620886f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.414639] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1979.414639] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1979.414639] env[62405]: _type = "HttpNfcLease" [ 1979.414639] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1979.450690] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c83488-7ec8-458f-b386-d6ef394a7e50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.464912] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50485d99-cdc8-46d8-883f-eb1e90773271 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.468600] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947971, 'name': ReconfigVM_Task, 'duration_secs': 0.485798} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.468933] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d937c90c-10b2-4c57-b1db-7b433c3d9017/d937c90c-10b2-4c57-b1db-7b433c3d9017.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1979.469891] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78f896aa-5ac3-4cfa-85bf-f2e2799a1e44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.497424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa1257b-3607-4e31-b3db-61d86ed0ec3b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.500131] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1979.500131] env[62405]: value = "task-1947973" [ 1979.500131] env[62405]: _type = "Task" [ 1979.500131] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.506573] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24e3500-f39d-438a-8ce2-11c04d6005f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.513086] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947973, 'name': Rename_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.524008] env[62405]: DEBUG nova.compute.provider_tree [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.546362] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947972, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.574718] env[62405]: INFO nova.compute.manager [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Detaching volume 3e3abe30-bd3e-4a0a-a97d-e8583b031955 [ 1979.616678] env[62405]: INFO nova.virt.block_device [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Attempting to driver detach volume 3e3abe30-bd3e-4a0a-a97d-e8583b031955 from mountpoint /dev/sdb [ 1979.616910] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1979.617128] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401540', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'name': 'volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd186b2f4-3fd1-44be-b8a4-080972aff3a0', 'attached_at': '', 'detached_at': '', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'serial': '3e3abe30-bd3e-4a0a-a97d-e8583b031955'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1979.617995] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04e6b13-914c-4fc1-a243-5f983482fb70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.639621] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caca656b-a30f-4551-a1dd-059b5b8918c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.649166] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eec5ed-4f96-435d-8b2e-60b745875ac3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.671578] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08db6565-523e-4cb6-bb23-98574b696d72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.687063] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] The volume has not been displaced from its original location: [datastore1] volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955/volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1979.692289] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfiguring VM instance instance-0000003d to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1979.692640] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-967c5abf-9f30-46f0-ac81-0ab8b0b17f19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.711235] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1979.711235] env[62405]: value = "task-1947974" [ 1979.711235] env[62405]: _type = "Task" [ 1979.711235] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.722093] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947974, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.723880] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.784641] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.784979] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.785278] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.906441] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1979.906441] env[62405]: value = 
"session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1979.906441] env[62405]: _type = "HttpNfcLease" [ 1979.906441] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1979.906951] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1979.906951] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524b3cf6-3af8-d67f-c176-36f35108f91e" [ 1979.906951] env[62405]: _type = "HttpNfcLease" [ 1979.906951] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1979.907516] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff02c65f-6e33-484b-8640-7a8e890c14b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.917525] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1979.917724] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk. {{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1979.988250] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-29557c38-4f58-4cf9-967c-d02295f4952b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.010065] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947973, 'name': Rename_Task, 'duration_secs': 0.234686} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.010392] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1980.010674] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e605f65b-35de-4554-aa7b-08f621af59de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.017293] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1980.017293] env[62405]: value = "task-1947975" [ 1980.017293] env[62405]: _type = "Task" [ 1980.017293] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.025487] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.029535] env[62405]: DEBUG nova.scheduler.client.report [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1980.035253] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.035500] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.035961] env[62405]: DEBUG nova.objects.instance [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] 
Lazy-loading 'flavor' on Instance uuid 556e1bca-f2f1-4200-96df-997d48ce5a15 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.048270] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947972, 'name': CreateSnapshot_Task, 'duration_secs': 0.780697} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.048270] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1980.049061] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d232fa-3e10-4f69-a395-dc1166c2e7ac {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.092495] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updated VIF entry in instance network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.092495] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.224060] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947974, 'name': ReconfigVM_Task, 'duration_secs': 0.260241} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.225357] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Reconfigured VM instance instance-0000003d to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1980.230063] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-096064c5-cf52-4006-8473-bc07bed41ad9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.250325] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1980.250325] env[62405]: value = "task-1947976" [ 1980.250325] env[62405]: _type = "Task" [ 1980.250325] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.265890] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947976, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.531726] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947975, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.538129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.549s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.538129] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1980.541588] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.828s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.541968] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.542242] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1980.542637] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.421s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.542977] env[62405]: DEBUG nova.objects.instance [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lazy-loading 'resources' on Instance uuid 1b820a12-4ca5-4b89-9016-81ebac4f1c3b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.551159] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f23929-083e-4ad6-a7c0-713f6cecaf81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.571769] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1980.579112] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-aa17a92d-650d-4e61-b0c8-8984e918fbd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.585027] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8b5ff7-194e-4e24-992c-cfa30f921b64 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.606170] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.606814] env[62405]: DEBUG nova.compute.manager 
[req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1980.609023] env[62405]: DEBUG nova.compute.manager [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1980.609023] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.609023] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.609023] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1980.615723] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e49d522-9757-4f7c-be3b-90933c61e04f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.620570] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1980.620570] env[62405]: value = "task-1947977" [ 1980.620570] env[62405]: _type = "Task" [ 1980.620570] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.630780] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c01a93f-42a9-45c1-bdc9-1d30893d3c4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.641524] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': CloneVM_Task} progress is 12%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.672105] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179788MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1980.672193] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.764915] env[62405]: DEBUG oslo_vmware.api [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947976, 'name': ReconfigVM_Task, 'duration_secs': 0.182875} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.764915] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401540', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'name': 'volume-3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd186b2f4-3fd1-44be-b8a4-080972aff3a0', 'attached_at': '', 'detached_at': '', 'volume_id': '3e3abe30-bd3e-4a0a-a97d-e8583b031955', 'serial': '3e3abe30-bd3e-4a0a-a97d-e8583b031955'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1980.853216] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.853409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.853708] env[62405]: DEBUG nova.network.neutron [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1981.030299] env[62405]: DEBUG oslo_vmware.api [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947975, 'name': PowerOnVM_Task, 'duration_secs': 0.61048} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.030612] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1981.030771] env[62405]: INFO nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Took 12.44 seconds to spawn the instance on the hypervisor. [ 1981.030948] env[62405]: DEBUG nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1981.031957] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82643242-936f-4e49-ae9a-c49e5fa402f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.054391] env[62405]: DEBUG nova.compute.utils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1981.059298] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1981.059298] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1981.125389] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1981.125962] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1981.127285] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf580e2-4e2f-499a-8f12-ff46c71f81b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.131608] env[62405]: DEBUG nova.policy [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eea29f093ad409eb10eb3b50c194ff3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7541d8c77a3f434094bc30a4d402bfcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1981.147219] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1981.147219] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1981.151051] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a7a1cf19-03b1-4c75-aa5c-b5c6b3a301b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.152067] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.181967] env[62405]: DEBUG nova.objects.instance [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 556e1bca-f2f1-4200-96df-997d48ce5a15 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1981.377511] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5280a21c-3450-ce54-b3c9-72bfd5f4c03b/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1981.377734] env[62405]: INFO nova.virt.vmwareapi.images [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Downloaded image file data 6ee02a73-5729-47e1-93a1-23fefdcafc1e [ 1981.378575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25b4bac-43c4-465e-8804-a29f6812ce55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.398526] env[62405]: DEBUG nova.objects.instance [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1981.399931] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-816c145b-37aa-4c26-a3c9-96c3db08bda4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.427536] env[62405]: INFO nova.virt.vmwareapi.images [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] The imported VM was unregistered [ 1981.430574] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1981.430824] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1981.434132] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca407573-0665-4aac-997b-10582a59a525 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.451247] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Created directory with path [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1981.451467] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3/OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3.vmdk to [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk. 
{{(pid=62405) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1981.451839] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c6b796cb-84bd-464a-a18e-c320415e9ea6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.465173] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1981.465173] env[62405]: value = "task-1947979" [ 1981.465173] env[62405]: _type = "Task" [ 1981.465173] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.475914] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.550604] env[62405]: INFO nova.compute.manager [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Took 28.67 seconds to build instance. [ 1981.560374] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1981.560796] env[62405]: DEBUG nova.network.neutron [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.562944] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1981.595233] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6bc734-5607-45a2-abbb-a7e6ca22390f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.607807] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e33fa99-8638-4275-a5ae-69c7e20a9ade {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.640690] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Successfully created port: 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1981.645814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4056dfac-8b90-460b-833a-9226667e73ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.656026] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.660212] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446d2537-3c78-40ec-ab73-5b4b3822f0d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.676333] env[62405]: DEBUG nova.compute.provider_tree [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.685045] env[62405]: DEBUG nova.objects.base [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<556e1bca-f2f1-4200-96df-997d48ce5a15> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1981.685045] env[62405]: DEBUG nova.network.neutron [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1981.691249] env[62405]: DEBUG nova.compute.manager [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Received event network-changed-fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1981.691437] env[62405]: DEBUG nova.compute.manager [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 
req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Refreshing instance network info cache due to event network-changed-fb939ba3-6c42-4855-80a4-e268dd0bbe54. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1981.691651] env[62405]: DEBUG oslo_concurrency.lockutils [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] Acquiring lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.691797] env[62405]: DEBUG oslo_concurrency.lockutils [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] Acquired lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.691963] env[62405]: DEBUG nova.network.neutron [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Refreshing network info cache for port fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1981.766300] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Successfully updated port: 0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1981.794516] env[62405]: DEBUG nova.policy [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1981.914801] env[62405]: DEBUG nova.network.neutron [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.980504] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.055952] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe87da43-3dd2-4172-bcd2-1615c1dff432 tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.180s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.066995] env[62405]: DEBUG oslo_concurrency.lockutils [req-d772ccb3-7c4b-4b19-8bb8-27937a1c0f91 req-56297e7c-c918-41a3-8dcb-049488b9fd80 service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.154249] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.181782] env[62405]: DEBUG nova.scheduler.client.report [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1982.271027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.271027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.271027] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1982.411036] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0b33cf7d-6f64-4ab9-8082-abc70e3938d0 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.337s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.417335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.486447] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.569943] env[62405]: DEBUG nova.network.neutron [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updated VIF entry in instance network info cache for port fb939ba3-6c42-4855-80a4-e268dd0bbe54. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1982.570412] env[62405]: DEBUG nova.network.neutron [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updating instance_info_cache with network_info: [{"id": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "address": "fa:16:3e:56:d7:78", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb939ba3-6c", "ovs_interfaceid": "fb939ba3-6c42-4855-80a4-e268dd0bbe54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.575572] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1982.607572] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1982.607872] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1982.608252] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1982.608252] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1982.608464] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1982.608629] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1982.608931] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1982.609122] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1982.609482] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1982.609680] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1982.609923] env[62405]: DEBUG nova.virt.hardware [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1982.610975] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cdd20c-c6da-4287-9b64-63d74b7ab5d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.621600] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bbe1b2-2f20-48f2-b0c8-c07f9f70f29c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.626810] env[62405]: INFO nova.compute.manager [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Rebuilding instance [ 1982.652669] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.690030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.692390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.968s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.692591] env[62405]: DEBUG nova.objects.instance [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1982.699505] env[62405]: DEBUG nova.compute.manager [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1982.702447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bf92c9-18ee-48d8-95e7-422c88c96833 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.715186] env[62405]: INFO nova.scheduler.client.report [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Deleted allocations for instance 1b820a12-4ca5-4b89-9016-81ebac4f1c3b [ 1982.807649] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1982.948439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0254a78-1aba-441e-8cc2-361c615a64e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.975061] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4312442-c29f-4dca-bca7-c76c870e7f2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.982904] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1982.989730] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.074505] env[62405]: DEBUG oslo_concurrency.lockutils [req-51f1c4e9-d165-4dca-868a-9ca8ae75c2f8 req-50f5c0ac-dfeb-4682-9861-428905bdc232 service nova] Releasing lock "refresh_cache-14512ed2-9eae-4753-b83c-8c0d0d5d9432" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.098509] env[62405]: DEBUG nova.network.neutron [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Updating instance_info_cache with network_info: [{"id": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "address": "fa:16:3e:ff:dc:40", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0830bd57-ad", "ovs_interfaceid": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.152814] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1947977, 'name': 
CloneVM_Task, 'duration_secs': 2.090247} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.153101] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Created linked-clone VM from snapshot [ 1983.153864] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdc3b75-9a40-42c4-9173-87d559178fb6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.163648] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Uploading image ca187b4d-a52b-4628-a4f9-f6cf89613d47 {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1983.194748] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1983.194748] env[62405]: value = "vm-401563" [ 1983.194748] env[62405]: _type = "VirtualMachine" [ 1983.194748] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1983.194998] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0f931ac0-309d-41e1-aa3d-36890f7487f3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.210307] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease: (returnval){ [ 1983.210307] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52842a60-3496-e37e-9b38-7dc423325c56" [ 1983.210307] env[62405]: _type = "HttpNfcLease" [ 1983.210307] env[62405]: } obtained for exporting VM: (result){ [ 1983.210307] env[62405]: value = "vm-401563" [ 1983.210307] env[62405]: _type = "VirtualMachine" [ 1983.210307] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1983.210307] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the lease: (returnval){ [ 1983.210307] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52842a60-3496-e37e-9b38-7dc423325c56" [ 1983.210307] env[62405]: _type = "HttpNfcLease" [ 1983.210307] env[62405]: } to be ready. 
{{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1983.210307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.210307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.210307] env[62405]: DEBUG nova.compute.manager [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1983.215295] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f4f1c9-e513-4f8f-9ca5-aa5bbf90a05e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.223708] env[62405]: DEBUG nova.compute.manager [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1983.224364] env[62405]: DEBUG nova.objects.instance [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1983.231777] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1983.231777] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52842a60-3496-e37e-9b38-7dc423325c56" [ 1983.231777] env[62405]: _type = "HttpNfcLease" [ 1983.231777] env[62405]: } is ready. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1983.232607] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59a47073-dc2f-443a-ab69-a2287d354fcb tempest-ImagesOneServerNegativeTestJSON-728935096 tempest-ImagesOneServerNegativeTestJSON-728935096-project-member] Lock "1b820a12-4ca5-4b89-9016-81ebac4f1c3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.731s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.233794] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1983.233794] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52842a60-3496-e37e-9b38-7dc423325c56" [ 1983.233794] env[62405]: _type = "HttpNfcLease" [ 1983.233794] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1983.234845] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e864652-4a75-472f-8492-0a5486347146 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.249698] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1983.249698] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1983.388543] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-897a6b80-d51c-418d-8786-80ae1e18f158 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.483780] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 69%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.493294] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1983.494949] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3b9721e-06e8-4eea-923e-6ac033756872 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.502152] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1983.502152] env[62405]: value = "task-1947981" [ 1983.502152] env[62405]: _type = "Task" [ 1983.502152] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.511878] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.602138] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.602747] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Instance network_info: |[{"id": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "address": "fa:16:3e:ff:dc:40", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0830bd57-ad", "ovs_interfaceid": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1983.603170] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 
tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:dc:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0830bd57-ad4c-4775-9f4b-826fad7b43ed', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1983.620613] env[62405]: DEBUG oslo.service.loopingcall [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1983.620892] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1983.621190] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e293172-d9a0-4bc4-a8e5-d1690bc0d517 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.643505] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Successfully updated port: 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1983.657769] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1983.657769] env[62405]: value = "task-1947982" [ 1983.657769] env[62405]: _type = "Task" [ 1983.657769] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.677758] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947982, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.705076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8f178103-3f5f-4a58-a9e8-b71478d977af tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.705076] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.032s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.736159] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1983.736159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fc32ac4-34a1-42ef-b4c3-e955d8c2c153 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.751307] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1983.751307] env[62405]: value = "task-1947983" [ 1983.751307] env[62405]: _type = "Task" [ 1983.751307] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.764631] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947983, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.765842] env[62405]: DEBUG nova.network.neutron [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Successfully updated port: e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1983.982472] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.984055] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.989724] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.016258] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947981, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.153636] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.153636] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.153636] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1984.172720] env[62405]: DEBUG nova.compute.manager [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-vif-plugged-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1984.173500] env[62405]: DEBUG oslo_concurrency.lockutils [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.173888] env[62405]: DEBUG oslo_concurrency.lockutils [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] Lock "81d9be97-9147-4754-80c2-68c1a389842e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.174159] env[62405]: DEBUG oslo_concurrency.lockutils [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] Lock "81d9be97-9147-4754-80c2-68c1a389842e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.174669] env[62405]: DEBUG nova.compute.manager [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] No waiting events found dispatching network-vif-plugged-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1984.175102] env[62405]: WARNING nova.compute.manager [req-319a9ef4-7307-4e5b-8248-24e44caad6f1 req-faa4606c-e6ed-41f8-8017-e8ecc09572a0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received unexpected event network-vif-plugged-2ba16494-2db9-4083-9a27-d4f12dac6ba1 for instance with vm_state building and task_state 
spawning. [ 1984.186132] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947982, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.242743] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1984.243073] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c365c10e-0bd7-4425-a7be-7f26d0cfa17c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.256023] env[62405]: DEBUG oslo_vmware.api [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1984.256023] env[62405]: value = "task-1947984" [ 1984.256023] env[62405]: _type = "Task" [ 1984.256023] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.268684] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947983, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.275340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.275561] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.275807] env[62405]: DEBUG nova.network.neutron [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1984.277233] env[62405]: DEBUG oslo_vmware.api [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947984, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.482704] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947979, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.838152} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.483896] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3/OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3.vmdk to [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk. [ 1984.483896] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Cleaning up location [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1984.483896] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_4808a882-df74-42fc-925f-5516b3e0adb3 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.484108] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-865fb265-c804-433c-b83c-9ef726dbb41f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.486117] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1984.494224] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1984.494224] env[62405]: value = "task-1947985" [ 1984.494224] env[62405]: _type = "Task" [ 1984.494224] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.504103] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.514634] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947981, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.596977] env[62405]: DEBUG nova.compute.manager [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Received event network-vif-plugged-0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1984.597359] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Acquiring lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.597640] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.597929] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.598190] env[62405]: DEBUG nova.compute.manager [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] No waiting events found dispatching network-vif-plugged-0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1984.598376] env[62405]: WARNING nova.compute.manager [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Received unexpected event network-vif-plugged-0830bd57-ad4c-4775-9f4b-826fad7b43ed for instance with vm_state building and task_state spawning. [ 1984.598558] env[62405]: DEBUG nova.compute.manager [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Received event network-changed-0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1984.598759] env[62405]: DEBUG nova.compute.manager [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Refreshing instance network info cache due to event network-changed-0830bd57-ad4c-4775-9f4b-826fad7b43ed. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1984.598969] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Acquiring lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.599131] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Acquired lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.599633] env[62405]: DEBUG nova.network.neutron [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Refreshing network info cache for port 0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1984.677079] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947982, 'name': CreateVM_Task, 'duration_secs': 0.725205} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.677360] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1984.679247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.679247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.679247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1984.679626] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3303c5-84a6-43e5-b295-af728f24898f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.685088] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1984.685088] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f328b8-e003-6281-380c-7d3b5dfd0b95" [ 1984.685088] env[62405]: _type = "Task" [ 1984.685088] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.693016] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f328b8-e003-6281-380c-7d3b5dfd0b95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.705083] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1984.728778] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Applying migration context for instance ec0a05fc-4a11-4e07-a03c-e357a7a750ab as it has an incoming, in-progress migration 215904f6-d5c8-46ab-a546-1d37550b6512. Migration status is post-migrating {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1984.731209] env[62405]: INFO nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating resource usage from migration 215904f6-d5c8-46ab-a546-1d37550b6512 [ 1984.771050] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947983, 'name': PowerOffVM_Task, 'duration_secs': 0.578731} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.771050] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.771050] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1984.771050] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d3d7a2-06b4-4062-a96d-ef7a04aff2bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.778938] env[62405]: DEBUG oslo_vmware.api [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1947984, 'name': PowerOffVM_Task, 'duration_secs': 0.235583} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.778938] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.778938] env[62405]: DEBUG nova.compute.manager [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1984.781286] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6260c483-fdf4-4d33-afa4-bb479b089dc8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.789814] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 4d59d9fd-23df-4933-97ed-32602e51e9aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.789814] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance d186b2f4-3fd1-44be-b8a4-080972aff3a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance c39d9059-8da4-4c8d-99ab-d66b8445e7da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 41e5385d-f0c7-4431-8424-e60dbeebaf8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 6fcfada3-d73a-4814-bf45-d34b26d76d4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790784] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f269844b-a9b4-40a2-8ba4-a62ee59b4e40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.790994] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a91a6d04-2ec0-4568-bdb3-732d148644de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.791214] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0d2b305d-d754-413c-afdf-3a2e8f143891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.791441] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 556e1bca-f2f1-4200-96df-997d48ce5a15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.791823] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f16e3d13-6db6-4f61-b0e4-661856a9166b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.791882] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ca0ff947-1ae0-4f19-ae71-0784f2c20ebe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.792047] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 15718289-5c19-4c2d-a9d8-d30ce0d63c68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.792298] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 14512ed2-9eae-4753-b83c-8c0d0d5d9432 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.792445] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance d937c90c-10b2-4c57-b1db-7b433c3d9017 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.792662] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Migration 215904f6-d5c8-46ab-a546-1d37550b6512 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1984.793020] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance ec0a05fc-4a11-4e07-a03c-e357a7a750ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.793282] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 5645c9a4-2640-4190-956f-00fc2ea03a3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.793434] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 81d9be97-9147-4754-80c2-68c1a389842e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.795261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.799768] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80de33c2-6dc2-4361-943b-abe2a88f6785 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.848124] env[62405]: WARNING nova.network.neutron [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. 
ignoring it [ 1984.905899] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.906449] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.906688] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.906997] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a401e089-4b96-45c4-8065-57b0b243841e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.917606] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1984.917606] env[62405]: value = "task-1947987" [ 1984.917606] env[62405]: _type = "Task" [ 1984.917606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.926810] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.930710] env[62405]: DEBUG nova.network.neutron [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.003642] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.058163} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.004017] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.004221] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.004581] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk to [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1985.008070] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af97e692-98a0-43ab-93ad-4cf654f791ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.016560] env[62405]: DEBUG oslo_vmware.api [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1947981, 'name': PowerOnVM_Task, 'duration_secs': 1.052343} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.019564] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1985.020276] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5472e0a7-a69a-4211-86bb-156b0799f8ee tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance 'ec0a05fc-4a11-4e07-a03c-e357a7a750ab' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1985.024785] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1985.024785] env[62405]: value = "task-1947988" [ 1985.024785] env[62405]: _type = "Task" [ 1985.024785] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.029041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.039386] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.197086] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f328b8-e003-6281-380c-7d3b5dfd0b95, 'name': SearchDatastore_Task, 'duration_secs': 0.014647} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.197450] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.197662] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1985.198208] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.198420] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.198623] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1985.198997] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45cf15c5-b955-45a7-b233-d0faa9c66f4c {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.212932] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1985.213177] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1985.213971] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c726670d-6bde-4f23-8f39-391d6c774ab7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.220412] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1985.220412] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f28967-52cb-fc93-62f8-b3f939727631" [ 1985.220412] env[62405]: _type = "Task" [ 1985.220412] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.230838] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f28967-52cb-fc93-62f8-b3f939727631, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.300770] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 989a7146-71ea-433b-86f9-b7a0f0ee91b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1985.302139] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1985.302139] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1985.311066] env[62405]: DEBUG oslo_concurrency.lockutils [None req-94fdf5db-04b1-4fab-bd2b-03c6f4f9055c tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.101s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.317989] env[62405]: DEBUG nova.network.neutron [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5b357f2-b442-4514-aa4d-9234dfa04642", "address": "fa:16:3e:75:31:be", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5b357f2-b4", "ovs_interfaceid": "e5b357f2-b442-4514-aa4d-9234dfa04642", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.428552] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.473615} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.429533] env[62405]: DEBUG nova.network.neutron [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Updated VIF entry in instance network info cache for port 0830bd57-ad4c-4775-9f4b-826fad7b43ed. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1985.430046] env[62405]: DEBUG nova.network.neutron [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Updating instance_info_cache with network_info: [{"id": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "address": "fa:16:3e:ff:dc:40", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0830bd57-ad", "ovs_interfaceid": "0830bd57-ad4c-4775-9f4b-826fad7b43ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.434076] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.434607] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1985.434799] env[62405]: DEBUG 
nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1985.437802] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.438640] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Instance network_info: |[{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1985.439018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:df:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ba16494-2db9-4083-9a27-d4f12dac6ba1', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1985.446787] env[62405]: DEBUG oslo.service.loopingcall [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.447756] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1985.448353] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f2e8f36-884d-4c75-88fb-ba161f1f485b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.472647] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1985.472647] env[62405]: value = "task-1947989" [ 1985.472647] env[62405]: _type = "Task" [ 1985.472647] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.483659] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947989, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.547527] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.667022] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b1a105-f7af-4f1c-83e1-506854b8ecec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.688828] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f990a8a-4d45-4fae-b98f-6edfa86638ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.733647] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9625f9-0a8f-49b3-a89d-a71cf0b470a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.747358] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a11717-9153-448d-93e2-e4d815d44e01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.754895] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f28967-52cb-fc93-62f8-b3f939727631, 'name': SearchDatastore_Task, 'duration_secs': 0.022271} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.756521] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f18ef11b-d983-48db-bd8c-764d90b6a507 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.762292] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1985.762292] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527fe195-5fb7-6eac-583a-2abeb0f9d6e5" [ 1985.762292] env[62405]: _type = "Task" [ 1985.762292] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.771086] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.783286] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527fe195-5fb7-6eac-583a-2abeb0f9d6e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.821308] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.822161] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.822342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.823292] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a187cd92-29ca-4bbf-911f-556fedbaee9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.848642] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1985.851049] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1985.851049] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1985.851049] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1985.851049] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1985.851049] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1985.851495] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1985.851556] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1985.852035] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1985.852363] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1985.852611] env[62405]: DEBUG nova.virt.hardware [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1985.860204] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfiguring VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1985.865499] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f827d8e7-fac9-4da0-9d5e-7ba576026944 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.891502] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1985.891502] env[62405]: value = "task-1947990" [ 1985.891502] env[62405]: _type = "Task" [ 1985.891502] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.905851] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.939367] env[62405]: DEBUG oslo_concurrency.lockutils [req-a954133e-1387-4c44-9136-579944f5a665 req-41cd30fe-e3b4-4196-9482-f3ed3808e592 service nova] Releasing lock "refresh_cache-5645c9a4-2640-4190-956f-00fc2ea03a3a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.987644] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947989, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.046454] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 29%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.178708] env[62405]: DEBUG nova.objects.instance [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1986.277871] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1986.285128] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527fe195-5fb7-6eac-583a-2abeb0f9d6e5, 'name': SearchDatastore_Task, 'duration_secs': 0.086709} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.285737] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.285992] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 5645c9a4-2640-4190-956f-00fc2ea03a3a/5645c9a4-2640-4190-956f-00fc2ea03a3a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1986.286279] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c28ec065-761b-46db-a846-6aaf6215177c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.297128] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1986.297128] env[62405]: value = "task-1947991" [ 1986.297128] env[62405]: _type = "Task" [ 1986.297128] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.313246] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.405296] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.474867] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1986.475131] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1986.475294] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1986.475477] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1986.475627] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1986.475779] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1986.476113] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1986.476297] 
env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1986.476469] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1986.476685] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1986.476817] env[62405]: DEBUG nova.virt.hardware [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1986.478058] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec2989b-469b-46d0-ac17-6b7d8cbdc543 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.492886] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947989, 'name': CreateVM_Task, 'duration_secs': 0.554348} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.495299] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1986.495883] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.496073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.496399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1986.498046] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24e1bc2-cb51-4759-9757-6e2e46fc9eff {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.502147] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3512649d-e75f-48c3-9938-642a936a9f08 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.507774] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1986.507774] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d83a14-f298-a9d1-2242-7cb1ccc0bda9" [ 1986.507774] env[62405]: _type = "Task" [ 1986.507774] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.519211] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:57:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6399297e-11b6-47b0-9a9f-712bb90b6ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04edbefd-e96c-47d6-bfd7-72fb2a759156', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1986.526780] env[62405]: DEBUG oslo.service.loopingcall [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1986.527849] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1986.527849] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1297969-fddf-433a-b8fe-6a0129b4fb5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.552135] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d83a14-f298-a9d1-2242-7cb1ccc0bda9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.561761] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1986.562078] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.562417] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.562719] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.563047] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] No waiting events found dispatching network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1986.563262] env[62405]: WARNING nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received unexpected event network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 for instance with vm_state active and task_state None. [ 1986.563457] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1986.563678] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing instance network info cache due to event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1986.563904] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.564086] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.564334] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1986.568640] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1986.568640] env[62405]: value = "task-1947992" [ 1986.568640] env[62405]: _type = "Task" [ 1986.568640] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.575885] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.584768] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947992, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.686368] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.686598] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.686826] env[62405]: DEBUG nova.network.neutron [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1986.687177] env[62405]: DEBUG nova.objects.instance [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'info_cache' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1986.787563] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1986.787909] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.083s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.788237] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.759s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.790089] env[62405]: INFO nova.compute.claims [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1986.812329] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.906214] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.022102] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d83a14-f298-a9d1-2242-7cb1ccc0bda9, 'name': SearchDatastore_Task, 'duration_secs': 0.090089} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.022424] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.022663] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1987.022926] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.023090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.023298] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1987.023587] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c263efee-4df7-4ca0-be0a-a179bcd362f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.040312] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1987.040535] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1987.041545] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1199cc8b-b8b2-452a-aed7-37c6b0a80fb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.056380] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1987.056380] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52704caf-6bd4-f8c1-8de8-874e29ba87f3" [ 1987.056380] env[62405]: _type = "Task" [ 1987.056380] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.062051] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.073431] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52704caf-6bd4-f8c1-8de8-874e29ba87f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.092160] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947992, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.192206] env[62405]: DEBUG nova.objects.base [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1987.311961] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.412623] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.493083] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updated VIF entry in instance network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1987.493844] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.563294] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.569199] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52704caf-6bd4-f8c1-8de8-874e29ba87f3, 'name': SearchDatastore_Task, 'duration_secs': 0.095533} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.570071] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5c05846-6c14-4e66-8e46-d4c4cae2b0f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.579042] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1987.579042] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f2550c-8964-91b8-7f79-fda9b2b8db4a" [ 1987.579042] env[62405]: _type = "Task" [ 1987.579042] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.585491] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.585752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.585980] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.586197] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.586370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.589898] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947992, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.591861] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f2550c-8964-91b8-7f79-fda9b2b8db4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.592433] env[62405]: INFO nova.compute.manager [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Terminating instance [ 1987.817486] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.911179] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.997039] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.997190] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1987.997900] env[62405]: DEBUG nova.compute.manager [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-e5b357f2-b442-4514-aa4d-9234dfa04642. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1987.998205] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.998358] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.998568] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1988.062434] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947988, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.737504} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.062710] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6ee02a73-5729-47e1-93a1-23fefdcafc1e/6ee02a73-5729-47e1-93a1-23fefdcafc1e.vmdk to [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1988.063633] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851e281b-a0ce-44f5-82cc-6cf2949965d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.068936] env[62405]: DEBUG nova.network.neutron [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [{"id": "995727bb-89db-40f7-a02b-916afa2c9641", "address": "fa:16:3e:15:17:ac", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", 
"segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995727bb-89", "ovs_interfaceid": "995727bb-89db-40f7-a02b-916afa2c9641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1988.093583] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1988.107597] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2e104ac-6c36-4308-becd-4bc37ee86aad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.124900] env[62405]: DEBUG nova.compute.manager [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1988.125188] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1988.126881] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867b31d3-1211-4ef1-9535-596d4968bf12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.141272] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f2550c-8964-91b8-7f79-fda9b2b8db4a, 'name': SearchDatastore_Task, 'duration_secs': 0.090478} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.151322] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.151936] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/81d9be97-9147-4754-80c2-68c1a389842e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1988.152315] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1988.152315] env[62405]: value = "task-1947993" [ 1988.152315] env[62405]: _type = "Task" [ 1988.152315] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.152564] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1947992, 'name': CreateVM_Task, 'duration_secs': 1.17498} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.153375] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1988.157447] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e5f32e1-1d0d-43a2-aaf4-b8fecf8e2d98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.161328] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1988.161550] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a6f4cea-f302-4a4e-af27-57f15107cb58 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.169128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.169381] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] 
Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.169900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1988.171359] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67bebc23-1d73-46ba-b02d-fd805aad6cb7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.177309] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1988.177309] env[62405]: value = "task-1947994" [ 1988.177309] env[62405]: _type = "Task" [ 1988.177309] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.183977] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1988.183977] env[62405]: value = "task-1947995" [ 1988.183977] env[62405]: _type = "Task" [ 1988.183977] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.184714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.185075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.185417] env[62405]: DEBUG nova.compute.manager [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Going to confirm migration 6 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1988.200483] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1988.200483] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52510d22-e76b-ec71-de66-80b6c063a599" [ 1988.200483] env[62405]: _type = "Task" [ 1988.200483] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.201957] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.224165] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.230912] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.237468] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52510d22-e76b-ec71-de66-80b6c063a599, 'name': SearchDatastore_Task, 'duration_secs': 0.01404} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.242180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.242585] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1988.242936] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.243229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.243520] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 
tempest-ServersAdminTestJSON-1517572684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1988.244846] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07650abc-113f-4f6c-9381-11ea9ca0e693 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.255636] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1988.255991] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1988.260993] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9631438-bd0c-4bea-8938-2fb9a2ac311b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.270271] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1988.270271] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52188f51-b37c-9295-de64-65610c7e8ff6" [ 1988.270271] env[62405]: _type = "Task" [ 1988.270271] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.289344] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52188f51-b37c-9295-de64-65610c7e8ff6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.296728] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d255a1-8c5a-41c1-9d3d-ad375d128830 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.311085] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18a3407-2de9-401e-addc-04e139aba0b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.318697] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.347174] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c98e0b-9a59-40b2-b849-ae9cf4fe2d7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.355670] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c7096c-f1f5-43f3-a102-78748a97183b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.371362] env[62405]: DEBUG nova.compute.provider_tree [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1988.407706] env[62405]: DEBUG oslo_vmware.api [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1947990, 'name': ReconfigVM_Task, 'duration_secs': 2.220579} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.407706] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.407706] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfigured VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1988.572543] env[62405]: DEBUG oslo_concurrency.lockutils [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.681463] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.697703] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947994, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.713324] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947995, 'name': PowerOffVM_Task, 'duration_secs': 0.398341} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.713590] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1988.713769] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1988.716019] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50cced93-f553-4f3f-ae37-a182aa314e13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.781156] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52188f51-b37c-9295-de64-65610c7e8ff6, 'name': SearchDatastore_Task, 'duration_secs': 0.020882} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.781976] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1a70a04-8d89-4f80-aa17-c9c666bbd90f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.785229] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port e5b357f2-b442-4514-aa4d-9234dfa04642. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1988.785632] env[62405]: DEBUG nova.network.neutron [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5b357f2-b442-4514-aa4d-9234dfa04642", "address": "fa:16:3e:75:31:be", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5b357f2-b4", "ovs_interfaceid": "e5b357f2-b442-4514-aa4d-9234dfa04642", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1988.790635] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1988.790635] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250d31e-478e-ec16-02b0-205326acc748" [ 1988.790635] env[62405]: _type = "Task" [ 1988.790635] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.801805] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250d31e-478e-ec16-02b0-205326acc748, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.809837] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.810150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquired lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.810942] env[62405]: DEBUG nova.network.neutron [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1988.810942] env[62405]: DEBUG nova.objects.instance [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'info_cache' on Instance uuid ec0a05fc-4a11-4e07-a03c-e357a7a750ab {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1988.817469] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.875155] env[62405]: DEBUG nova.scheduler.client.report [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1988.915852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2ad2a64f-16f2-4d10-8ae3-d9b120096747 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.880s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.052824] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1989.053127] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1989.053350] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleting the datastore file [datastore1] d937c90c-10b2-4c57-b1db-7b433c3d9017 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1989.053652] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c4d76e0-e1e1-41f7-a6c6-aca0e21e3ff6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.062706] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for the task: (returnval){ [ 1989.062706] env[62405]: value = "task-1947997" [ 1989.062706] env[62405]: _type = "Task" [ 1989.062706] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.071639] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.176748] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.193091] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.292731] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b3d7993-3271-448d-877a-6352dd90f0d1 req-e523f29c-37df-4f17-8718-c6e678090b8c service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.303343] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5250d31e-478e-ec16-02b0-205326acc748, 'name': SearchDatastore_Task, 'duration_secs': 0.036414} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.306616] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.307064] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1989.307378] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-632e4325-3e07-4730-a247-82eda73d992c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.315084] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947991, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.848234} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.316324] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 5645c9a4-2640-4190-956f-00fc2ea03a3a/5645c9a4-2640-4190-956f-00fc2ea03a3a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1989.316540] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1989.316863] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1989.316863] env[62405]: value = "task-1947998" [ 1989.316863] env[62405]: _type = "Task" [ 1989.316863] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.317073] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-341708e8-7b0c-4ca5-a38f-9bfd5ff8f4b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.329695] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.330860] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1989.330860] env[62405]: value = "task-1947999" [ 1989.330860] env[62405]: _type = "Task" [ 1989.330860] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.342646] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947999, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.383409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.384188] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1989.576122] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.581292] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1989.581904] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a551755f-eee9-455e-b63d-e26536eacfc7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.593292] env[62405]: DEBUG oslo_vmware.api [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 1989.593292] env[62405]: value = "task-1948000" [ 1989.593292] env[62405]: _type = "Task" [ 1989.593292] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.607340] env[62405]: DEBUG oslo_vmware.api [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.679532] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.698142] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947994, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.830602] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.840369] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1947999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.390227} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.840666] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1989.841539] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13151515-8896-4708-ba8d-7d6b6561735c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.866770] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 5645c9a4-2640-4190-956f-00fc2ea03a3a/5645c9a4-2640-4190-956f-00fc2ea03a3a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1989.867192] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5786e0ff-40d3-48b1-81c4-5572f73ef7f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.890464] env[62405]: DEBUG nova.compute.utils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1989.891893] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1989.891893] env[62405]: value = "task-1948001" [ 1989.891893] env[62405]: _type = "Task" [ 1989.891893] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.892480] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1989.892676] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1989.906419] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948001, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.077512] env[62405]: DEBUG oslo_vmware.api [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Task: {'id': task-1947997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.538341} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.077960] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1990.078211] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1990.078501] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1990.078758] env[62405]: INFO nova.compute.manager [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Took 1.95 seconds to destroy the instance on the hypervisor. [ 1990.079199] env[62405]: DEBUG oslo.service.loopingcall [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1990.079469] env[62405]: DEBUG nova.compute.manager [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1990.079628] env[62405]: DEBUG nova.network.neutron [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1990.104894] env[62405]: DEBUG oslo_vmware.api [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948000, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.178604] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1947993, 'name': ReconfigVM_Task, 'duration_secs': 1.814116} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.180439] env[62405]: DEBUG nova.policy [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '220a22a778524e3e837e924a927e839b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c96fbdce2e94fc9831663c0ff069ef9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 1990.182281] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Reconfigured VM instance instance-00000048 to attach disk [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b/f16e3d13-6db6-4f61-b0e4-661856a9166b.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1990.183081] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20475ded-baf0-4580-a47f-8ff66e76da0e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.195781] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1947994, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.609945} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.197287] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/81d9be97-9147-4754-80c2-68c1a389842e.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1990.197528] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1990.197873] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1990.197873] env[62405]: value = "task-1948002" [ 1990.197873] env[62405]: _type = "Task" [ 1990.197873] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.198235] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d253fb1f-64c2-4d4b-b973-5ec4cf213d7c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.210766] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948002, 'name': Rename_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.212385] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1990.212385] env[62405]: value = "task-1948003" [ 1990.212385] env[62405]: _type = "Task" [ 1990.212385] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.220536] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948003, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.331256] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1947998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.901608} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.331830] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1990.331830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1990.332128] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28e9a0c4-d379-4b3b-9670-321feba21d7e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.339684] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1990.339684] env[62405]: value = "task-1948004" [ 1990.339684] env[62405]: _type = "Task" [ 1990.339684] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.352606] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.393505] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1990.405673] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948001, 'name': ReconfigVM_Task, 'duration_secs': 0.50983} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.405936] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 5645c9a4-2640-4190-956f-00fc2ea03a3a/5645c9a4-2640-4190-956f-00fc2ea03a3a.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1990.406750] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a05b425f-34f2-4ea2-971c-db2979e09ce5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.413460] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1990.413460] env[62405]: value = "task-1948005" [ 1990.413460] env[62405]: _type = "Task" [ 1990.413460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.426881] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948005, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.481872] env[62405]: DEBUG nova.network.neutron [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [{"id": "1336ca88-2020-4b2c-b082-e45e1fe68506", "address": "fa:16:3e:d9:07:e2", "network": {"id": "7398b956-045a-440c-b8fc-34bad57dbc27", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1969082667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41626e27199f4370a2554bb243a72d41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50171613-b419-45e3-9ada-fcb6cd921428", "external-id": "nsx-vlan-transportzone-914", "segmentation_id": 914, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1336ca88-20", "ovs_interfaceid": "1336ca88-2020-4b2c-b082-e45e1fe68506", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.605346] env[62405]: DEBUG oslo_vmware.api [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948000, 'name': PowerOnVM_Task, 'duration_secs': 0.809008} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.606096] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Successfully created port: c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1990.608132] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1990.608369] env[62405]: DEBUG nova.compute.manager [None req-84ffd5f0-1098-4cf0-b1dd-b9ba82de21c7 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1990.609199] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd14395-d127-4937-a78f-02fbd68c2a4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.710141] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948002, 'name': Rename_Task, 'duration_secs': 0.202837} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.710489] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1990.710764] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b0ea6ab-a570-48e5-b0fd-833edb84dfa0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.718351] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1990.718351] env[62405]: value = "task-1948006" [ 1990.718351] env[62405]: _type = "Task" [ 1990.718351] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.726028] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086319} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.726028] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1990.726289] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1eece6-6d99-42dc-86f2-faab44503f23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.731310] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.751834] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/81d9be97-9147-4754-80c2-68c1a389842e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1990.752214] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14d3ffab-92ac-4c7a-bd5c-85e43bdcd862 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.772151] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1990.772151] env[62405]: value = "task-1948007" [ 1990.772151] env[62405]: _type = "Task" [ 1990.772151] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.781108] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948007, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.849789] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102088} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.850261] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1990.851108] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516b09c9-9d4f-4a9a-b5a0-3f78fde1aa92 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.876066] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1990.876377] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d7ae8c6-52d0-4935-835e-ae7df71cf083 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.897035] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1990.897035] env[62405]: value = "task-1948008" [ 1990.897035] env[62405]: _type = "Task" [ 1990.897035] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.908097] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948008, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.923118] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948005, 'name': Rename_Task, 'duration_secs': 0.140442} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.923374] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1990.923630] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7beb06b8-976e-4b18-89df-a9696244cd6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.930190] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1990.930190] env[62405]: value = "task-1948009" [ 1990.930190] env[62405]: _type = "Task" [ 1990.930190] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.939378] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948009, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.951384] env[62405]: DEBUG nova.network.neutron [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.985236] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Releasing lock "refresh_cache-ec0a05fc-4a11-4e07-a03c-e357a7a750ab" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.985531] env[62405]: DEBUG nova.objects.instance [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lazy-loading 'migration_context' on Instance uuid ec0a05fc-4a11-4e07-a03c-e357a7a750ab {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1991.012954] env[62405]: DEBUG nova.compute.manager [req-61855bb2-c339-461b-b4c6-4f2b587a43b4 req-90bde706-a578-44d6-8b51-4745c7482015 service nova] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Received event network-vif-deleted-75ce34f4-4ddb-4250-95f6-4b8af0eb3bdc {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1991.229507] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948006, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.282323] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948007, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.406018] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.410332] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1991.437275] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1991.437561] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1991.437722] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1991.437928] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1991.438103] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1991.438257] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1991.438471] env[62405]: 
DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1991.438632] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1991.438817] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1991.438990] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1991.439192] env[62405]: DEBUG nova.virt.hardware [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1991.440068] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abba1ad9-2eb2-4c02-884d-e8da1dcb51ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.445681] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948009, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.450670] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8a7f28-7775-403a-8853-c4d471bacb72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.454845] env[62405]: INFO nova.compute.manager [-] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Took 1.38 seconds to deallocate network for instance. 
[ 1991.488334] env[62405]: DEBUG nova.objects.base [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1991.489365] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551f9bcd-e4e8-460f-9e05-22a0813dce2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.510041] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a886e534-78bb-450c-b505-e2c60d51e7aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.516418] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1991.516418] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d896e-5274-f578-597c-4fb2bf2c9c2a" [ 1991.516418] env[62405]: _type = "Task" [ 1991.516418] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.524850] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d896e-5274-f578-597c-4fb2bf2c9c2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.698643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.698978] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.731029] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948006, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.784274] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948007, 'name': ReconfigVM_Task, 'duration_secs': 0.63873} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.784660] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/81d9be97-9147-4754-80c2-68c1a389842e.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.785374] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95ef565b-96cc-4fa5-8cae-e8c66165ccf0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.792703] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1991.792703] env[62405]: value = "task-1948010" [ 1991.792703] env[62405]: _type = "Task" [ 1991.792703] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.805621] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948010, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.906769] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948008, 'name': ReconfigVM_Task, 'duration_secs': 0.973586} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.907109] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d/78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.907766] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cda6b239-2b48-4d9f-924f-7502b74aa0be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.914110] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1991.914110] env[62405]: value = "task-1948011" [ 1991.914110] env[62405]: _type = "Task" [ 1991.914110] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.923049] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948011, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.941130] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948009, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.971986] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.972218] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.973038] env[62405]: DEBUG nova.objects.instance [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lazy-loading 'resources' on Instance uuid d937c90c-10b2-4c57-b1db-7b433c3d9017 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1992.029560] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527d896e-5274-f578-597c-4fb2bf2c9c2a, 'name': SearchDatastore_Task, 'duration_secs': 0.012937} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.030285] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.084956] env[62405]: DEBUG nova.compute.manager [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Received event network-vif-plugged-c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1992.085218] env[62405]: DEBUG oslo_concurrency.lockutils [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] Acquiring lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.085447] env[62405]: DEBUG oslo_concurrency.lockutils [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.085635] env[62405]: DEBUG oslo_concurrency.lockutils [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.085821] env[62405]: DEBUG nova.compute.manager [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] No waiting events found dispatching network-vif-plugged-c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1992.085993] env[62405]: WARNING nova.compute.manager [req-e8a19105-b575-4039-bd2d-9f7b3e2b1171 req-06154b75-c4dd-4db9-b82e-801766c1262b service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Received unexpected event network-vif-plugged-c6153e75-b6ea-44d0-8934-10a8a2acaabf for instance with vm_state building and task_state spawning. 
[ 1992.173942] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Successfully updated port: c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1992.202601] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.202864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.203900] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af39fddc-52d7-4111-b8e1-766f9b48e3bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.231688] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d96f58-fd22-4d0e-bcba-da3247ba5679 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.240115] env[62405]: DEBUG oslo_vmware.api [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948006, 'name': PowerOnVM_Task, 'duration_secs': 1.121092} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.256295] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1992.266478] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfiguring VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1992.267328] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c225771-67cd-4a6d-8d21-c61404194f48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.287457] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 1992.287457] env[62405]: value = "task-1948012" [ 1992.287457] env[62405]: _type = "Task" [ 1992.287457] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.299427] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.305079] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948010, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.375328] env[62405]: DEBUG nova.compute.manager [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1992.376256] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58c8664-ad4f-48ca-872c-9db14bd9d014 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.424945] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948011, 'name': Rename_Task, 'duration_secs': 0.210709} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.427036] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1992.427036] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9020b14c-c2db-491e-b791-7a73696ebdbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.436340] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1992.436340] env[62405]: value = "task-1948013" [ 1992.436340] env[62405]: _type = "Task" [ 1992.436340] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.442512] env[62405]: DEBUG oslo_vmware.api [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948009, 'name': PowerOnVM_Task, 'duration_secs': 1.08316} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.443144] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1992.443361] env[62405]: INFO nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Took 13.42 seconds to spawn the instance on the hypervisor. [ 1992.443544] env[62405]: DEBUG nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1992.444285] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a24a74-dd95-4f25-98d0-83d37ba0120e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.450036] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.678026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1992.678026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquired lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1992.678026] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1992.781017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21209189-1597-47af-8f57-f052f065f5b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.787495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b1e364-4608-47e3-b997-e4480696cd18 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.831009] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e36ee95-7a3c-46c4-a02e-e2062840e70a 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.834430] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.839067] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948010, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.844767] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16506d9d-c447-44be-995c-c5f0fa11ba55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.860604] env[62405]: DEBUG nova.compute.provider_tree [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.894590] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1f557c6b-5d89-4f2e-81dd-147ca7c0fc12 tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 39.878s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.950809] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948013, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.970695] env[62405]: INFO nova.compute.manager [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Took 26.21 seconds to build instance. [ 1993.210474] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1993.286925] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1993.288106] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168b76ea-6897-4746-a7f6-5b4a5862626e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.298856] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1993.299067] env[62405]: ERROR oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk due to incomplete transfer. [ 1993.302226] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1f52470a-4287-4aea-865c-dc018ca169e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.306961] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.311712] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948010, 'name': Rename_Task, 'duration_secs': 1.195408} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.312020] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1993.312384] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18c86eaf-0615-42ea-9739-f617ca37fa75 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.314590] env[62405]: DEBUG oslo_vmware.rw_handles [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5200af3a-52d9-e669-3d5b-34d3114918eb/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1993.314753] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Uploaded image ca187b4d-a52b-4628-a4f9-f6cf89613d47 to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1993.317102] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1993.318030] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bcff4dca-43ab-440d-afd0-be0af1d9713c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.322506] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 1993.322506] env[62405]: value = "task-1948014" [ 1993.322506] env[62405]: _type = "Task" [ 1993.322506] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.327083] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1993.327083] env[62405]: value = "task-1948015" [ 1993.327083] env[62405]: _type = "Task" [ 1993.327083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.336150] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.339475] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948015, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.367025] env[62405]: DEBUG nova.scheduler.client.report [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1993.409322] env[62405]: DEBUG nova.network.neutron [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Updating instance_info_cache with network_info: [{"id": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "address": "fa:16:3e:eb:45:a0", "network": {"id": "f9d5240e-713c-41d2-8977-b4716137304e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1986634968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c96fbdce2e94fc9831663c0ff069ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6153e75-b6", "ovs_interfaceid": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.452773] env[62405]: DEBUG oslo_vmware.api [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948013, 'name': PowerOnVM_Task, 'duration_secs': 0.670892} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.452773] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1993.452773] env[62405]: DEBUG nova.compute.manager [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1993.452773] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62dc58a-feb3-4a41-a2bd-c4242e54d72d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.473473] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5ed0bde8-3248-45e4-9ff6-5afc08151576 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.724s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.806192] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.833879] env[62405]: DEBUG oslo_vmware.api [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948014, 'name': PowerOnVM_Task, 'duration_secs': 0.496363} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.834589] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1993.834827] env[62405]: INFO nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Took 11.26 seconds to spawn the instance on the hypervisor. 
[ 1993.835038] env[62405]: DEBUG nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1993.835789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0733f9d8-716b-4412-a9ac-2fd7289ca3b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.841652] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948015, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.872395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.875752] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.845s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.903697] env[62405]: INFO nova.scheduler.client.report [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Deleted allocations for instance d937c90c-10b2-4c57-b1db-7b433c3d9017 [ 1993.912572] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Releasing lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1993.913696] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Instance network_info: |[{"id": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "address": "fa:16:3e:eb:45:a0", "network": {"id": "f9d5240e-713c-41d2-8977-b4716137304e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1986634968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c96fbdce2e94fc9831663c0ff069ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6153e75-b6", "ovs_interfaceid": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1993.913696] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:45:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6153e75-b6ea-44d0-8934-10a8a2acaabf', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1993.922627] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Creating folder: Project (8c96fbdce2e94fc9831663c0ff069ef9). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1993.923019] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ace94fb-9298-413a-bd78-5072f6533d53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.935836] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Created folder: Project (8c96fbdce2e94fc9831663c0ff069ef9) in parent group-v401284. [ 1993.935836] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Creating folder: Instances. Parent ref: group-v401567. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1993.936035] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2aedb11-f3a8-4b30-9a5a-3a8091748fb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.948772] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Created folder: Instances in parent group-v401567. [ 1993.949091] env[62405]: DEBUG oslo.service.loopingcall [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.949309] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1993.949973] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb1b1b6a-f84a-41e7-9800-707eeb4ba291 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.975653] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1993.975653] env[62405]: value = "task-1948018" [ 1993.975653] env[62405]: _type = "Task" [ 1993.975653] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.976158] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.987083] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948018, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.304652] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.338541] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948015, 'name': Destroy_Task, 'duration_secs': 0.608465} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.339342] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Destroyed the VM [ 1994.339342] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1994.339435] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8aa7750f-4242-4cb4-b661-d9fb6f7ee2f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.348030] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1994.348030] env[62405]: value = "task-1948019" [ 1994.348030] env[62405]: _type = "Task" [ 1994.348030] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.361694] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948019, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.364181] env[62405]: INFO nova.compute.manager [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Took 25.23 seconds to build instance. [ 1994.411290] env[62405]: DEBUG oslo_concurrency.lockutils [None req-361f3b8b-f11b-417c-a68d-323aa69af7eb tempest-ServerDiskConfigTestJSON-1342754001 tempest-ServerDiskConfigTestJSON-1342754001-project-member] Lock "d937c90c-10b2-4c57-b1db-7b433c3d9017" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.825s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.426279] env[62405]: DEBUG nova.compute.manager [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Received event network-changed-c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1994.426350] env[62405]: DEBUG nova.compute.manager [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Refreshing instance network info cache due to event network-changed-c6153e75-b6ea-44d0-8934-10a8a2acaabf. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1994.426556] env[62405]: DEBUG oslo_concurrency.lockutils [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] Acquiring lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.427116] env[62405]: DEBUG oslo_concurrency.lockutils [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] Acquired lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.427116] env[62405]: DEBUG nova.network.neutron [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Refreshing network info cache for port c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1994.485483] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948018, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.717347] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d9ad76-c9af-4182-a3e2-22b1da1b4c34 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.726764] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3bd883-c097-4882-9b53-cca891d7509c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.757908] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec47296-ab91-478c-887e-01b1f8d7f361 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.767020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ac293a-e2b5-4d05-bbe7-055b4d93a37a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.781198] env[62405]: DEBUG nova.compute.provider_tree [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.803253] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.861343] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948019, 'name': RemoveSnapshot_Task, 'duration_secs': 0.408245} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.861763] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1994.862007] env[62405]: DEBUG nova.compute.manager [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1994.862922] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cd1efd-2a66-4b66-9c22-9c4d516548c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.865897] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d82cece-a3ab-4d14-af09-a1ae57087981 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.746s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.986482] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948018, 'name': CreateVM_Task, 'duration_secs': 0.605849} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.986651] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1994.987358] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.987545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.987875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1994.988139] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd61a626-3af3-4bd3-bf63-f776519a9119 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.992742] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1994.992742] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bf1f49-6d3f-96b8-c084-4205f6fb06d7" [ 1994.992742] env[62405]: _type = "Task" [ 1994.992742] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.000299] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bf1f49-6d3f-96b8-c084-4205f6fb06d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.284183] env[62405]: DEBUG nova.scheduler.client.report [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1995.303382] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.376216] env[62405]: INFO nova.compute.manager [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Shelve offloading [ 1995.428423] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.428780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.429075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.429272] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.429445] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.431486] env[62405]: INFO nova.compute.manager [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Terminating instance [ 1995.517269] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52bf1f49-6d3f-96b8-c084-4205f6fb06d7, 'name': SearchDatastore_Task, 'duration_secs': 0.031132} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.517615] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.517818] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1995.518060] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.518214] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.518389] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1995.518662] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8162a507-2565-42a3-9d31-bcee48f410e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.531665] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1995.531850] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1995.532580] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9f7a071-40d8-4bef-b9c3-ff756fd1e807 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.535589] env[62405]: DEBUG nova.network.neutron [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Updated VIF entry in instance network info cache for port c6153e75-b6ea-44d0-8934-10a8a2acaabf. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1995.535856] env[62405]: DEBUG nova.network.neutron [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Updating instance_info_cache with network_info: [{"id": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "address": "fa:16:3e:eb:45:a0", "network": {"id": "f9d5240e-713c-41d2-8977-b4716137304e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1986634968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c96fbdce2e94fc9831663c0ff069ef9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6153e75-b6", "ovs_interfaceid": "c6153e75-b6ea-44d0-8934-10a8a2acaabf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.540179] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1995.540179] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fabed3-df36-050c-d84d-6b332ff9427c" [ 1995.540179] env[62405]: _type = "Task" [ 1995.540179] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.548501] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fabed3-df36-050c-d84d-6b332ff9427c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.811924] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.888253] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1995.888253] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6792754e-a465-4846-b3c2-0be479f24772 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.895011] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1995.895011] env[62405]: value = "task-1948020" [ 1995.895011] env[62405]: _type = "Task" [ 1995.895011] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.911486] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1995.911651] env[62405]: DEBUG nova.compute.manager [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1995.912672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d4faa7-ebb1-481a-b5cc-be11b09c7928 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.920533] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.920672] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.920851] env[62405]: DEBUG nova.network.neutron [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.935495] env[62405]: DEBUG nova.compute.manager [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1995.935641] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1995.938020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca460a66-2018-4afb-be4b-d06f0704a3f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.947920] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1995.948223] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-986ef706-ad84-4dbb-bc6e-6f0ebc040414 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.955823] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1995.955823] env[62405]: value = "task-1948021" [ 1995.955823] env[62405]: _type = "Task" [ 1995.955823] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.964947] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.040405] env[62405]: DEBUG oslo_concurrency.lockutils [req-53a9a0a7-8385-4c2d-8a09-9f697bb4a79b req-537b643e-df5a-4d5a-95a9-8a2aedc3467a service nova] Releasing lock "refresh_cache-989a7146-71ea-433b-86f9-b7a0f0ee91b4" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.055138] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fabed3-df36-050c-d84d-6b332ff9427c, 'name': SearchDatastore_Task, 'duration_secs': 0.030217} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.056265] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6b3e38e-9509-4255-814c-0e52111fec94 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.063203] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1996.063203] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adad0b-1b07-9dcc-d86c-05888b7e623d" [ 1996.063203] env[62405]: _type = "Task" [ 1996.063203] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.072670] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adad0b-1b07-9dcc-d86c-05888b7e623d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.304509] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.429s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.311229] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.335s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.311497] env[62405]: DEBUG nova.objects.instance [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1996.325484] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.471927] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948021, 'name': PowerOffVM_Task, 'duration_secs': 0.502495} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.471927] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1996.471927] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1996.471927] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7812691d-c772-4e2b-8f25-cb5baafe3aa7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.575394] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adad0b-1b07-9dcc-d86c-05888b7e623d, 'name': SearchDatastore_Task, 'duration_secs': 0.023074} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.575879] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.578063] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 989a7146-71ea-433b-86f9-b7a0f0ee91b4/989a7146-71ea-433b-86f9-b7a0f0ee91b4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1996.578063] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74e09531-ea72-47e4-b505-148f12b82439 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.587022] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1996.587022] env[62405]: value = "task-1948023" [ 1996.587022] env[62405]: _type = "Task" [ 1996.587022] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.593556] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.607714] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba531dd2-5335-4143-a8b2-d2df1cea36d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.615052] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Suspending the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1996.615052] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9370e56e-897d-4edc-8e99-e6aa589442f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.620916] env[62405]: DEBUG oslo_vmware.api [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 1996.620916] env[62405]: value = "task-1948024" [ 1996.620916] env[62405]: _type = "Task" [ 1996.620916] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.629766] env[62405]: DEBUG oslo_vmware.api [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948024, 'name': SuspendVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.631085] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1996.631715] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1996.631715] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] 5645c9a4-2640-4190-956f-00fc2ea03a3a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1996.632766] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6a1712b-c006-4a54-b955-84f59fcec7ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.638449] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 1996.638449] env[62405]: value = "task-1948025" [ 1996.638449] env[62405]: _type = "Task" [ 1996.638449] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.647402] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.737403] env[62405]: DEBUG nova.compute.manager [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1996.737721] env[62405]: DEBUG nova.compute.manager [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing instance network info cache due to event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1996.737834] env[62405]: DEBUG oslo_concurrency.lockutils [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1996.737973] env[62405]: DEBUG oslo_concurrency.lockutils [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1996.739788] env[62405]: DEBUG nova.network.neutron [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1996.836226] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.905911] env[62405]: INFO nova.scheduler.client.report [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocation for migration 215904f6-d5c8-46ab-a546-1d37550b6512 [ 1997.096573] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.110422] env[62405]: DEBUG nova.network.neutron [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.132958] env[62405]: DEBUG oslo_vmware.api [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948024, 'name': SuspendVM_Task} progress is 54%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.149077] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.188275] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.188542] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.188755] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.189069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.189366] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.191713] env[62405]: INFO nova.compute.manager [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Terminating instance [ 1997.335270] env[62405]: DEBUG oslo_concurrency.lockutils [None req-61a37609-b77f-4af5-abb1-aa836364e517 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.338376] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.415543] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.228s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.573418] env[62405]: DEBUG nova.network.neutron [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updated VIF entry in instance network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1997.573870] env[62405]: DEBUG nova.network.neutron [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.601029] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948023, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.617019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.633984] env[62405]: DEBUG oslo_vmware.api [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948024, 'name': SuspendVM_Task} progress is 54%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.650467] env[62405]: DEBUG oslo_vmware.api [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.792277} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.654019] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1997.654019] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1997.654019] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1997.654019] env[62405]: INFO nova.compute.manager [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1997.654019] env[62405]: DEBUG oslo.service.loopingcall [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1997.654019] env[62405]: DEBUG nova.compute.manager [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1997.654019] env[62405]: DEBUG nova.network.neutron [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1997.696239] env[62405]: DEBUG nova.compute.manager [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1997.696696] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.698162] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfb4e08-a7eb-42c9-8120-ea7d18818885 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.707698] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1997.707943] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77075259-9f93-48ce-b3ec-ba9697ed671c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.715168] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1997.715168] env[62405]: value = "task-1948026" [ 1997.715168] env[62405]: _type = "Task" [ 1997.715168] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.723902] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948026, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.834501] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.077176] env[62405]: DEBUG oslo_concurrency.lockutils [req-64fbdacb-f60a-478c-b24b-b042eb68232e req-5a0cba68-36cd-4056-b8c3-4591d8dcf5e0 service nova] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.099178] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948023, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.05043} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.101072] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 989a7146-71ea-433b-86f9-b7a0f0ee91b4/989a7146-71ea-433b-86f9-b7a0f0ee91b4.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1998.101072] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1998.101072] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d774758-b2cc-4f18-93b6-cde97897d6d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.108785] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1998.108785] env[62405]: value = "task-1948027" [ 1998.108785] env[62405]: _type = "Task" [ 1998.108785] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.119037] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.135988] env[62405]: DEBUG oslo_vmware.api [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948024, 'name': SuspendVM_Task, 'duration_secs': 1.346366} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.135988] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Suspended the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1998.135988] env[62405]: DEBUG nova.compute.manager [None req-64c18a5a-7556-4112-a8a5-2d7d9512443f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1998.135988] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c766be1d-db92-434d-8472-86a080eac952 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.240334] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948026, 'name': PowerOffVM_Task, 'duration_secs': 0.390226} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.240334] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1998.240334] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.240334] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93556839-20b4-4317-a490-15128d75505d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.262294] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1998.263455] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96d7f4c-fe1b-4eba-8b13-c3375daba31e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.273963] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.274419] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-01ab8673-1b3b-45b7-9669-2816fa205f66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.329564] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.450050] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.450050] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.450050] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] ca0ff947-1ae0-4f19-ae71-0784f2c20ebe {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.450413] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a59446a-e037-45a3-9fa3-63a19bb67684 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.458664] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 1998.458664] env[62405]: value = "task-1948030" [ 1998.458664] env[62405]: _type = "Task" [ 1998.458664] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.466639] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.467413] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.467478] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.469023] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fac528e2-64bf-4ce4-86ee-15e454f75bff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.475374] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.480879] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 1998.480879] env[62405]: value = "task-1948031" [ 1998.480879] env[62405]: _type = "Task" [ 1998.480879] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.490624] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948031, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.621541] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086128} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.621877] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1998.622663] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6bba71-e9ea-4242-96ad-e2bfbfbd3113 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.647592] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 989a7146-71ea-433b-86f9-b7a0f0ee91b4/989a7146-71ea-433b-86f9-b7a0f0ee91b4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1998.647592] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f7a1edc-ee2b-4bd5-8cbd-85a48ff78993 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.678991] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1998.678991] env[62405]: value = "task-1948032" [ 1998.678991] env[62405]: _type = "Task" [ 1998.678991] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.690914] env[62405]: DEBUG nova.network.neutron [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.695382] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948032, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.729819] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.730242] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.730322] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.731101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.731101] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.732793] env[62405]: INFO nova.compute.manager [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Terminating instance [ 1998.832792] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.885513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.885848] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.969471] env[62405]: DEBUG oslo_vmware.api [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226771} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.970103] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.970103] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.970296] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.970437] env[62405]: INFO nova.compute.manager [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1998.970653] env[62405]: DEBUG oslo.service.loopingcall [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.970848] env[62405]: DEBUG nova.compute.manager [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1998.971046] env[62405]: DEBUG nova.network.neutron [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.991368] env[62405]: DEBUG oslo_vmware.api [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221599} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.991635] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.991820] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.992011] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.999797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "b2eae940-22bc-4c87-842f-30fbd04eba28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.999797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.020481] env[62405]: INFO nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted allocations for instance 6fcfada3-d73a-4814-bf45-d34b26d76d4a [ 1999.085999] env[62405]: DEBUG nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-vif-unplugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1999.085999] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.086250] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.086400] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.086565] env[62405]: DEBUG nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] No waiting events found dispatching network-vif-unplugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1999.086731] env[62405]: WARNING nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received unexpected event network-vif-unplugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba for instance with vm_state shelved_offloaded and task_state None. [ 1999.086894] env[62405]: DEBUG nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Received event network-vif-deleted-0830bd57-ad4c-4775-9f4b-826fad7b43ed {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1999.087298] env[62405]: DEBUG nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 1999.087502] env[62405]: DEBUG nova.compute.manager [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing instance network info cache due to event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 1999.087751] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1999.087945] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.088171] env[62405]: DEBUG nova.network.neutron [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1999.190792] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948032, 'name': ReconfigVM_Task, 'duration_secs': 0.285838} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.192638] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 989a7146-71ea-433b-86f9-b7a0f0ee91b4/989a7146-71ea-433b-86f9-b7a0f0ee91b4.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1999.192638] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db6ab3e6-2039-4b2a-99ba-8f6e647e0a11 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.194012] env[62405]: INFO nova.compute.manager [-] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Took 1.54 seconds to deallocate network for instance. [ 1999.211968] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1999.211968] env[62405]: value = "task-1948033" [ 1999.211968] env[62405]: _type = "Task" [ 1999.211968] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.228413] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948033, 'name': Rename_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.238246] env[62405]: DEBUG nova.compute.manager [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1999.238433] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1999.242042] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036b2f96-c0ab-4344-b62e-c443701fbaae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.248664] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1999.248943] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1f3b05e-1426-4a5a-8282-429c3ed3abde {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.255698] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1999.255698] env[62405]: value = "task-1948034" [ 1999.255698] env[62405]: _type = "Task" [ 1999.255698] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.264837] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1948034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.335178] env[62405]: DEBUG oslo_vmware.api [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948012, 'name': ReconfigVM_Task, 'duration_secs': 6.668576} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.336222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1999.336596] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Reconfigured VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1999.393868] env[62405]: DEBUG nova.compute.utils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1999.502253] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1999.528449] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.528449] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.528449] env[62405]: DEBUG nova.objects.instance [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'resources' on Instance uuid 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.707490] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.712990] env[62405]: INFO nova.compute.manager [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Resuming [ 1999.713582] env[62405]: DEBUG nova.objects.instance [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 
tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'flavor' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.729360] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948033, 'name': Rename_Task, 'duration_secs': 0.212729} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.729655] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1999.731199] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9f34ce9-39f9-45cf-8052-54cd1860bbc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.739052] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 1999.739052] env[62405]: value = "task-1948035" [ 1999.739052] env[62405]: _type = "Task" [ 1999.739052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.750532] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.768489] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1948034, 'name': PowerOffVM_Task, 'duration_secs': 0.249045} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.768767] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1999.768971] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1999.769247] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d0c68de-76ae-460a-ab2d-bfedd9ce488d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.844610] env[62405]: DEBUG nova.network.neutron [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.898508] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.902663] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1999.902868] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1999.903081] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleting the datastore file [datastore1] ec0a05fc-4a11-4e07-a03c-e357a7a750ab {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1999.904010] env[62405]: DEBUG nova.network.neutron [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updated VIF entry in instance network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1999.904374] env[62405]: DEBUG nova.network.neutron [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape84f02c8-cd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.905411] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f4aa60f-7a7e-4ac6-aab3-b518147c6836 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.913321] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for the task: (returnval){ [ 1999.913321] env[62405]: value = "task-1948037" [ 1999.913321] env[62405]: _type = "Task" [ 1999.913321] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.922301] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1948037, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.036877] env[62405]: DEBUG nova.objects.instance [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'numa_topology' on Instance uuid 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2000.041111] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.249386] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948035, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.347708] env[62405]: INFO nova.compute.manager [-] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Took 1.38 seconds to deallocate network for instance. [ 2000.408891] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3c1b86a-c111-4195-9cd4-e370d4d0d74e req-8915572b-2925-4ab7-a2ed-2a51d815f9eb service nova] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2000.428117] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1948037, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.539220] env[62405]: DEBUG nova.objects.base [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Object Instance<6fcfada3-d73a-4814-bf45-d34b26d76d4a> lazy-loaded attributes: resources,numa_topology {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2000.727799] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.727799] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.728129] env[62405]: DEBUG nova.network.neutron [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2000.752287] env[62405]: DEBUG oslo_vmware.api [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948035, 'name': PowerOnVM_Task, 'duration_secs': 0.813969} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.755368] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2000.755664] env[62405]: INFO nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Took 9.35 seconds to spawn the instance on the hypervisor. 
[ 2000.755912] env[62405]: DEBUG nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2000.757192] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9cc7d7-b5d1-48c1-a8e3-3f3ceec89997 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.813825] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1243e29-4e13-4611-a481-4d5638d6000e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.822501] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761599b1-17b8-49d6-a6f1-5d3291391b95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.863417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.864804] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18aa8296-dbce-495c-8a3c-7747c4b98f24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.874263] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d212015d-b014-4b32-b60d-ed68be62203b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.890708] env[62405]: DEBUG nova.compute.provider_tree [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2000.926680] env[62405]: DEBUG oslo_vmware.api [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Task: {'id': task-1948037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.720037} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.927020] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.927514] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2000.927514] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2000.927717] env[62405]: INFO nova.compute.manager [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Took 1.69 seconds to destroy the instance on the hypervisor. [ 2000.927942] env[62405]: DEBUG oslo.service.loopingcall [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.928183] env[62405]: DEBUG nova.compute.manager [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2000.928310] env[62405]: DEBUG nova.network.neutron [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2000.962461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.962734] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.962963] env[62405]: INFO nova.compute.manager [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Attaching volume 10498adc-afa1-4e8d-87d5-9511db990a6a to /dev/sdb [ 2001.002460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e50174-6ad7-4218-a810-8f91fd43228e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.010215] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6f31d8-e4e8-4e2a-a698-6f0d28b1f667 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.023556] env[62405]: DEBUG nova.virt.block_device [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating existing volume attachment record: 7ab0c928-2595-42bc-b87e-7f101b4c3982 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2001.111886] env[62405]: DEBUG nova.compute.manager [req-879c17e1-1a34-4784-8ca3-8eda26b6ac3b req-8b01491a-27b0-4746-9654-ef164a8478e5 service nova] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Received event network-vif-deleted-9835277c-1b66-4088-ab4a-9d2b8e7e6266 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2001.233855] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.234147] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquired lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.234372] env[62405]: DEBUG nova.network.neutron [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2001.277802] env[62405]: INFO nova.compute.manager [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Took 16.29 seconds to build instance. [ 2001.430587] env[62405]: ERROR nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [req-85485b3c-b373-4313-ae2f-f168f529bfe9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-85485b3c-b373-4313-ae2f-f168f529bfe9"}]} [ 2001.456168] env[62405]: DEBUG nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2001.478188] env[62405]: DEBUG nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2001.478188] env[62405]: DEBUG nova.compute.provider_tree [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2001.497959] env[62405]: DEBUG nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2001.524468] env[62405]: DEBUG nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2001.621769] env[62405]: INFO nova.network.neutron [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Port e5b357f2-b442-4514-aa4d-9234dfa04642 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 2001.621769] env[62405]: DEBUG nova.network.neutron [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.783524] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0649048e-e7ba-4b5b-82f7-9830c9b2c64f tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.801s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.813495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ee99ef-2abd-4398-9cec-e91a27a9911b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.824346] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ed4809-da1d-42d0-b80a-5f9af2a3e8e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.854933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.859240] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a744ead-4eca-43fe-89c3-070beaa1b12f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.873720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469c41d8-a53a-4567-b321-7786793d32d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.893863] env[62405]: DEBUG nova.compute.provider_tree [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2001.964921] env[62405]: DEBUG nova.network.neutron [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.002797] env[62405]: DEBUG nova.network.neutron [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [{"id": "dba92750-bf41-4683-b71d-128391ff29d0", "address": "fa:16:3e:e6:78:c4", "network": {"id": "72000fdf-4f7a-4c95-a7ac-d8404249f55c", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-589425764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "521150d8f23f4f76a0c785481c99e897", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba92750-bf", "ovs_interfaceid": "dba92750-bf41-4683-b71d-128391ff29d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.124484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.285695] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.285982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.286398] env[62405]: DEBUG nova.objects.instance [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'flavor' on Instance uuid 15718289-5c19-4c2d-a9d8-d30ce0d63c68 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.368360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.368611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.368826] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.369070] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.369257] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.371390] env[62405]: INFO nova.compute.manager [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Terminating instance [ 2002.440992] env[62405]: DEBUG nova.scheduler.client.report [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2002.441308] env[62405]: DEBUG nova.compute.provider_tree [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 163 to 164 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2002.441489] env[62405]: DEBUG nova.compute.provider_tree [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2002.467487] env[62405]: INFO nova.compute.manager [-] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Took 1.54 seconds to deallocate network for instance. [ 2002.505654] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Releasing lock "refresh_cache-f16e3d13-6db6-4f61-b0e4-661856a9166b" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.506647] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399bb88b-35fa-4468-86d9-fd0b20ba7ca7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.513947] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Resuming the VM {{(pid=62405) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2002.514250] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21c5d61f-94e9-4cda-b9d2-668c7eec943c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.520699] env[62405]: DEBUG oslo_vmware.api [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 2002.520699] env[62405]: value = "task-1948041" [ 2002.520699] env[62405]: _type = "Task" [ 2002.520699] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.530090] env[62405]: DEBUG oslo_vmware.api [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.629302] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e6347663-2fdf-4b49-8135-fd101b295081 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-556e1bca-f2f1-4200-96df-997d48ce5a15-e5b357f2-b442-4514-aa4d-9234dfa04642" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.930s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.875210] env[62405]: DEBUG nova.compute.manager [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2002.875581] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.876373] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abea053-c998-4a82-9d5b-365418a54c5a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.886760] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2002.886835] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4db1b816-6184-4903-8025-10585eb9b74f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.892031] env[62405]: DEBUG nova.objects.instance [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'pci_requests' on Instance uuid 15718289-5c19-4c2d-a9d8-d30ce0d63c68 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.894429] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 2002.894429] env[62405]: value = "task-1948042" [ 2002.894429] env[62405]: _type = "Task" [ 2002.894429] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.905513] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948042, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.948171] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.420s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.950907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.244s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.952273] env[62405]: DEBUG nova.objects.instance [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid 5645c9a4-2640-4190-956f-00fc2ea03a3a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.974858] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.032948] env[62405]: DEBUG oslo_vmware.api [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948041, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.205698] env[62405]: DEBUG nova.compute.manager [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Received event network-vif-deleted-1336ca88-2020-4b2c-b082-e45e1fe68506 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2003.205920] env[62405]: DEBUG nova.compute.manager [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2003.206325] env[62405]: DEBUG nova.compute.manager [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing instance network info cache due to event network-changed-d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2003.206590] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Acquiring lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.206741] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Acquired lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.206934] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Refreshing network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2003.396369] env[62405]: DEBUG nova.objects.base [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Object Instance<15718289-5c19-4c2d-a9d8-d30ce0d63c68> lazy-loaded attributes: flavor,pci_requests {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2003.396657] env[62405]: DEBUG nova.network.neutron [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2003.413271] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948042, 'name': PowerOffVM_Task, 'duration_secs': 0.197396} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.413588] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2003.413789] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2003.414108] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c98598fb-e55c-423f-a116-698598f0f13a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.460411] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ee9fc957-fc2d-4c13-8965-7367bc6b1c95 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.510s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.461411] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.607s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.461602] env[62405]: INFO nova.compute.manager [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Unshelving [ 2003.474663] env[62405]: DEBUG nova.policy [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13540c2dbc2b43bcb151ec7b5894904c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba9083cddcc24345b6ea5d2cbbbec5ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2003.530056] env[62405]: DEBUG oslo_vmware.api [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948041, 'name': PowerOnVM_Task, 'duration_secs': 0.774119} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.530370] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Resumed the VM {{(pid=62405) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2003.530559] env[62405]: DEBUG nova.compute.manager [None req-3a7e509b-fa65-43eb-a171-0c41abfc3beb tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2003.533604] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a2605f-3090-4557-ba58-321eaec6fffa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.615078] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2003.615326] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2003.615518] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Deleting the datastore file [datastore1] 989a7146-71ea-433b-86f9-b7a0f0ee91b4 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2003.615808] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b0a2e3e-64ce-4daf-a699-7124a97ddbe4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.625054] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for the task: (returnval){ [ 2003.625054] env[62405]: value = "task-1948045" [ 2003.625054] env[62405]: _type = "Task" [ 2003.625054] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.632597] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948045, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.717686] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dec3f5-a50f-440f-a58f-bb91bb0d22b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.725439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcdb75e-038b-4d2f-bc1b-b9ee14227584 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.758158] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4afee49-35d8-42a9-94bd-09e9f7c157bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.765595] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86f5ee6-2d67-4d16-b8d9-5d88232e5de2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.780815] env[62405]: DEBUG nova.compute.provider_tree [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2003.934956] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updated VIF entry in instance network info cache for port d440b728-2371-4e75-bb9f-2330f0318cae. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2003.935355] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [{"id": "d440b728-2371-4e75-bb9f-2330f0318cae", "address": "fa:16:3e:ea:15:ee", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd440b728-23", "ovs_interfaceid": "d440b728-2371-4e75-bb9f-2330f0318cae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.134261] env[62405]: DEBUG oslo_vmware.api [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Task: {'id': task-1948045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374359} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.134531] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2004.134715] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2004.134895] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2004.135079] env[62405]: INFO nova.compute.manager [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Took 1.26 seconds to destroy the instance on the hypervisor. 
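The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's task-polling pattern: the driver invokes a vCenter task, logs "Waiting for the task ... to complete" (the wait_for_task call site at api.py:397 quoted in each entry), then polls it ("progress is N%", _poll_task at api.py:434) until it completes successfully or fails. Below is a minimal sketch of that poll loop in plain Python; TaskInfo and fetch_task_info are illustrative stand-ins, not the actual oslo.vmware interfaces.

```python
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Illustrative stand-in for the vSphere task info being polled in the log."""
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0     # percent complete, as in the 'progress is N%' lines
    error: str = ""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, mirroring the loop behind the
    'Waiting for the task ... to complete' entries above.

    fetch_task_info is a zero-argument callable returning the current
    TaskInfo (a stand-in for reading the task's info property from vCenter).
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info                      # logged as 'completed successfully'
        if info.state == "error":
            raise RuntimeError(info.error)   # surfaced as a task failure
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # the 'progress is N%' line would be emitted here before the next poll
        time.sleep(poll_interval)
```

In the log itself this loop runs inside oslo.vmware (the api.py:397/434 call sites cited in each entry); the sketch only shows the shape of the wait.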
[ 2004.135320] env[62405]: DEBUG oslo.service.loopingcall [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2004.135502] env[62405]: DEBUG nova.compute.manager [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2004.135594] env[62405]: DEBUG nova.network.neutron [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2004.322137] env[62405]: DEBUG nova.scheduler.client.report [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 164 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2004.322408] env[62405]: DEBUG nova.compute.provider_tree [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 164 to 165 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2004.322714] env[62405]: DEBUG nova.compute.provider_tree [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.441051] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Releasing lock "refresh_cache-556e1bca-f2f1-4200-96df-997d48ce5a15" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.441368] env[62405]: DEBUG nova.compute.manager [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2004.441541] env[62405]: DEBUG nova.compute.manager 
[req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing instance network info cache due to event network-changed-3189d804-1d8d-4356-bbf0-e0bbda0a2d32. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2004.441753] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.442112] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.442112] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2004.470891] env[62405]: DEBUG nova.compute.utils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2004.828859] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.877s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2004.831586] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.790s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.832457] env[62405]: INFO nova.compute.claims [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2004.850084] env[62405]: INFO nova.scheduler.client.report [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance 5645c9a4-2640-4190-956f-00fc2ea03a3a [ 2004.876873] env[62405]: DEBUG nova.network.neutron [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.974338] env[62405]: INFO nova.virt.block_device [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] 
[instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Booting with volume c64ac26e-4f56-4aad-931f-053141f488c8 at /dev/sdb [ 2005.015029] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c103aa4-38a1-466a-86db-665c1923ca39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.026877] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ff83a2-044e-4b04-9708-65c78443ea5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.062471] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce552372-91cc-4aa9-a61b-3ef8bf035d21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.074338] env[62405]: DEBUG nova.network.neutron [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Successfully updated port: e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2005.080808] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81311fa7-9785-4be0-a417-c49b1104519f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.095309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.118315] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96db8908-48c5-4d19-b8d0-29abe21f541d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.127239] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbfff12-2009-4dee-bbd6-c499349ecaca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.140472] env[62405]: DEBUG nova.virt.block_device [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating existing volume attachment record: 39a45a65-ce69-4c19-aa23-f1ef302926f3 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2005.231890] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updated VIF entry in instance network info cache for port 3189d804-1d8d-4356-bbf0-e0bbda0a2d32. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2005.232244] env[62405]: DEBUG nova.network.neutron [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.329187] env[62405]: DEBUG nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Received event network-vif-deleted-c6153e75-b6ea-44d0-8934-10a8a2acaabf {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2005.329469] env[62405]: DEBUG nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2005.329706] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.330129] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.330315] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.330534] env[62405]: DEBUG nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] No waiting events found dispatching network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.330815] env[62405]: WARNING nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received unexpected event network-vif-plugged-e5b357f2-b442-4514-aa4d-9234dfa04642 for instance with vm_state active and task_state None. [ 2005.331059] env[62405]: DEBUG nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-changed-e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2005.331266] env[62405]: DEBUG nova.compute.manager [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing instance network info cache due to event network-changed-e5b357f2-b442-4514-aa4d-9234dfa04642. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2005.331501] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.357804] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0dabb350-d33b-4e16-91ec-8313dee3edfb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "5645c9a4-2640-4190-956f-00fc2ea03a3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.929s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.379900] env[62405]: INFO nova.compute.manager [-] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Took 1.24 seconds to deallocate network for instance. 
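For context on the inventory payloads repeatedly logged above for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7: Placement treats each resource class's schedulable capacity as (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may claim. A minimal sketch using the same dict the report client logs; the helper functions are illustrative, not Nova or Placement code.

```python
# Recompute the capacity implied by the inventory dicts logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1, "max_unit": 16,
                  "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1, "max_unit": 170,
                  "step_size": 1, "allocation_ratio": 1.0},
}


def capacity(inv):
    """Total amount Placement will let consumers allocate for one resource class."""
    return int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])


def max_single_allocation(inv):
    """Largest request a single consumer (one instance) can make."""
    return min(inv["max_unit"], capacity(inv))


for rc, inv in inventory.items():
    print(rc, capacity(inv), max_single_allocation(inv))
# VCPU 192 16, MEMORY_MB 196078 65530, DISK_GB 400 170
```

So the 48-vCPU node above advertises 192 schedulable VCPU at the 4.0 overcommit ratio, but no single instance can request more than max_unit of any class (16 VCPU, 65530 MB, 170 GB here).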
[ 2005.735474] env[62405]: DEBUG oslo_concurrency.lockutils [req-805b244e-dc4f-42d0-af6b-54a67d0f8f88 req-1f1effa6-b8e4-4400-9a54-5f1b036e849d service nova] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.735956] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.736180] env[62405]: DEBUG nova.network.neutron [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2005.885920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.074386] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2006.074641] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401571', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'name': 'volume-10498adc-afa1-4e8d-87d5-9511db990a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c39d9059-8da4-4c8d-99ab-d66b8445e7da', 'attached_at': '', 'detached_at': '', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'serial': '10498adc-afa1-4e8d-87d5-9511db990a6a'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2006.075527] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f95fbb-22c0-4ebf-9792-edb833f9f59b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.097765] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb3311b-8f2e-4775-ae72-1305d8305bb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.100760] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfbafba-f46a-4f9a-b754-70ae6e8cb154 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.125467] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-10498adc-afa1-4e8d-87d5-9511db990a6a/volume-10498adc-afa1-4e8d-87d5-9511db990a6a.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2006.128230] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a3563e-6d98-4796-8395-a03c168a0805 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.144924] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e913a2-a837-4086-8220-a78f567377ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.179444] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb94c2c-98bf-490f-84c0-97bec7794271 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.182528] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2006.182528] env[62405]: value = "task-1948049" [ 2006.182528] env[62405]: _type = "Task" [ 2006.182528] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.189867] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c397ae4d-96a7-4b94-9709-ee470efeb6e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.196273] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948049, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.205698] env[62405]: DEBUG nova.compute.provider_tree [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.271068] env[62405]: WARNING nova.network.neutron [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] 8e7f7222-48db-4dd5-a9e8-9a6d2b598918 already exists in list: networks containing: ['8e7f7222-48db-4dd5-a9e8-9a6d2b598918']. 
ignoring it [ 2006.554026] env[62405]: DEBUG nova.network.neutron [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5b357f2-b442-4514-aa4d-9234dfa04642", "address": "fa:16:3e:75:31:be", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5b357f2-b4", "ovs_interfaceid": "e5b357f2-b442-4514-aa4d-9234dfa04642", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.695050] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948049, 'name': ReconfigVM_Task, 'duration_secs': 0.329991} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.695417] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-10498adc-afa1-4e8d-87d5-9511db990a6a/volume-10498adc-afa1-4e8d-87d5-9511db990a6a.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2006.700621] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77423cd7-9bcf-44ed-8868-f01268d37271 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.710896] env[62405]: DEBUG nova.scheduler.client.report [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2006.719143] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2006.719143] env[62405]: value = "task-1948050" [ 2006.719143] env[62405]: _type = "Task" [ 2006.719143] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.727246] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948050, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.057145] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.057864] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.058046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.058363] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.058543] env[62405]: DEBUG nova.network.neutron [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Refreshing network info cache for port e5b357f2-b442-4514-aa4d-9234dfa04642 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2007.060718] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6faa36-682a-4324-ac14-70a4e9db5044 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.077392] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2007.077768] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2007.077768] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 
tempest-AttachInterfacesTestJSON-723496939-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2007.077949] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2007.078112] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2007.078266] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2007.078469] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2007.078627] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2007.078793] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2007.078956] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2007.079175] env[62405]: DEBUG nova.virt.hardware [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2007.085295] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfiguring VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2007.086144] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61803564-57f6-460e-8107-e91a00135477 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.103109] env[62405]: DEBUG oslo_vmware.api [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2007.103109] env[62405]: value = "task-1948051" [ 2007.103109] env[62405]: _type = "Task" [ 2007.103109] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.110837] env[62405]: DEBUG oslo_vmware.api [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948051, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.216377] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.216907] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2007.219787] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.357s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.220019] env[62405]: DEBUG nova.objects.instance [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lazy-loading 'resources' on Instance uuid ca0ff947-1ae0-4f19-ae71-0784f2c20ebe {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2007.230495] env[62405]: DEBUG oslo_vmware.api [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948050, 'name': ReconfigVM_Task, 'duration_secs': 0.161909} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.230773] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401571', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'name': 'volume-10498adc-afa1-4e8d-87d5-9511db990a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c39d9059-8da4-4c8d-99ab-d66b8445e7da', 'attached_at': '', 'detached_at': '', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'serial': '10498adc-afa1-4e8d-87d5-9511db990a6a'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2007.263948] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "798257f7-0590-4f82-82b0-d428cc6e6e92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2007.264224] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.612787] env[62405]: DEBUG oslo_vmware.api [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.723571] env[62405]: DEBUG nova.compute.utils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2007.725085] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2007.725261] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2007.767297] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2007.869358] env[62405]: DEBUG nova.policy [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9a2f1473c194340a88b2a94b70eb754', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a78c04608454ac88ecb97b4c87a9d17', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2008.064435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0a0252-4bc0-4661-8f33-2f77d3c19870 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.072574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ceb3b1-33af-45c6-aaf5-444d23626aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.107899] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b942bb-8050-4d03-845f-f256d35011c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.115671] env[62405]: DEBUG oslo_vmware.api [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948051, 'name': ReconfigVM_Task, 'duration_secs': 0.70212} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.117943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.118178] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfigured VM to attach interface {{(pid=62405) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2008.121760] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2b15a5-9839-4fc4-95be-477d382d0961 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.135780] env[62405]: DEBUG nova.compute.provider_tree [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.228827] env[62405]: DEBUG nova.network.neutron [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updated VIF entry in instance network info cache for port e5b357f2-b442-4514-aa4d-9234dfa04642. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2008.229363] env[62405]: DEBUG nova.network.neutron [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e5b357f2-b442-4514-aa4d-9234dfa04642", "address": "fa:16:3e:75:31:be", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5b357f2-b4", "ovs_interfaceid": "e5b357f2-b442-4514-aa4d-9234dfa04642", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.230952] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2008.279971] env[62405]: DEBUG nova.objects.instance [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'flavor' on Instance uuid c39d9059-8da4-4c8d-99ab-d66b8445e7da {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2008.296187] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.350412] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Successfully created port: 14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2008.627123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a7589e97-3d9e-47ef-81bb-b135b9e223da tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.341s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.643182] env[62405]: DEBUG nova.scheduler.client.report [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2008.735360] env[62405]: DEBUG oslo_concurrency.lockutils [req-189386b0-98fc-45ab-b298-12a213b48442 req-a368bfd7-c8c7-4008-9b77-93dc9cd2f9e3 service nova] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.785649] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0671d09-689b-4e94-b362-ddf92f1fdc88 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.823s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.148067] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.151022] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.176s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.151356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.153387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.268s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.153616] env[62405]: DEBUG nova.objects.instance [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lazy-loading 'resources' on Instance uuid 989a7146-71ea-433b-86f9-b7a0f0ee91b4 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2009.170987] env[62405]: INFO nova.scheduler.client.report [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Deleted allocations for instance ec0a05fc-4a11-4e07-a03c-e357a7a750ab [ 2009.172591] env[62405]: INFO nova.scheduler.client.report [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted allocations for instance ca0ff947-1ae0-4f19-ae71-0784f2c20ebe [ 2009.241078] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2009.266556] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2009.266808] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2009.266968] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2009.267171] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2009.267335] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2009.267491] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2009.267703] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2009.267864] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2009.268047] env[62405]: DEBUG nova.virt.hardware [None 
req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2009.268263] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2009.268461] env[62405]: DEBUG nova.virt.hardware [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2009.269371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2310727-f097-41fa-bd8f-2ee066049602 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.277923] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba79e29-44cc-40fd-90fc-a628e567c228 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.575673] env[62405]: DEBUG nova.compute.manager [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2009.686539] env[62405]: DEBUG oslo_concurrency.lockutils [None req-92207ad2-6b2b-4ddd-a538-b0f084ab5682 tempest-DeleteServersTestJSON-1050879801 tempest-DeleteServersTestJSON-1050879801-project-member] Lock "ec0a05fc-4a11-4e07-a03c-e357a7a750ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.956s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.689387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c3dedc8-384c-4583-9939-6f8aef706042 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "ca0ff947-1ae0-4f19-ae71-0784f2c20ebe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.501s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.778581] env[62405]: DEBUG nova.compute.manager [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Received event network-vif-plugged-14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2009.778814] env[62405]: DEBUG oslo_concurrency.lockutils [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] Acquiring lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.779010] 
env[62405]: DEBUG oslo_concurrency.lockutils [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.779252] env[62405]: DEBUG oslo_concurrency.lockutils [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.779427] env[62405]: DEBUG nova.compute.manager [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] No waiting events found dispatching network-vif-plugged-14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2009.779594] env[62405]: WARNING nova.compute.manager [req-0253b43a-7199-41a2-81cc-d9816ab6b4d8 req-499b8169-391a-4413-a93d-00daea0a0661 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Received unexpected event network-vif-plugged-14628f58-ebd5-4e11-8089-8c15cde335af for instance with vm_state building and task_state spawning. [ 2009.851252] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.851562] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.851806] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.852017] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.852195] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 
tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.854282] env[62405]: INFO nova.compute.manager [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Terminating instance [ 2009.877298] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Successfully updated port: 14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2009.925163] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bc75b3-e269-4a83-a9ee-69dafda678cd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.932791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7546ee19-b0b1-472c-9ff9-364b14e83a3f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.974687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82577ec8-191c-4c8c-afe5-6f63f6c1cce5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.984167] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66554b13-cdb1-4007-8de3-fc2f00833596 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.002663] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.002910] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.006271] env[62405]: DEBUG nova.compute.provider_tree [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2010.094069] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.145639] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.145903] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.146129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.146320] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.146492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.149468] env[62405]: INFO nova.compute.manager [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Terminating instance [ 2010.361913] env[62405]: DEBUG nova.compute.manager [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2010.362178] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2010.363069] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38373720-5ed7-49dd-9b00-1d1168a1d924 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.371259] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2010.371497] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b587941-3904-438e-a1ab-9fcffe17f970 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.377963] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 2010.377963] env[62405]: value = "task-1948054" [ 2010.377963] env[62405]: _type = "Task" [ 2010.377963] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.381562] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.381701] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.381845] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2010.387348] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948054, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.506881] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.507158] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.508051] env[62405]: DEBUG nova.scheduler.client.report [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2010.512036] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a02e7b0-8db2-470e-beba-dfe877b3470f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.531619] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc001c4-36e7-4d58-93f7-49682de7414c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.558084] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfiguring VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2010.558390] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-869dffc5-62be-4100-a018-5daedc41b49a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.576045] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2010.576045] env[62405]: value = "task-1948055" [ 2010.576045] env[62405]: _type = "Task" [ 2010.576045] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.583949] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.653904] env[62405]: DEBUG nova.compute.manager [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2010.654161] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2010.655013] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3581b78-2a71-413f-835e-b05364099272 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.662598] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2010.662839] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e416882-a0fe-4097-a239-f5a02aeecb3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.669728] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2010.669728] env[62405]: value = "task-1948056" [ 2010.669728] env[62405]: _type = "Task" [ 2010.669728] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.680165] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948056, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.743299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.889494] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948054, 'name': PowerOffVM_Task, 'duration_secs': 0.183583} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.890133] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2010.891028] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2010.891028] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04ef8e63-3dbd-413c-8269-4f647b691ad7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.928465] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2010.970924] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2010.971323] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2010.971433] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleting the datastore file [datastore1] f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2010.971706] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99a3f586-af7a-4887-94b4-76248fd0cece {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.979298] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for the task: (returnval){ [ 2010.979298] env[62405]: value = "task-1948058" [ 2010.979298] env[62405]: _type = "Task" [ 2010.979298] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.989392] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.017019] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.020021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.723s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.020779] env[62405]: INFO nova.compute.claims [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2011.039798] env[62405]: INFO nova.scheduler.client.report [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Deleted allocations for instance 989a7146-71ea-433b-86f9-b7a0f0ee91b4 [ 2011.082060] env[62405]: DEBUG nova.network.neutron [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updating instance_info_cache with network_info: [{"id": "14628f58-ebd5-4e11-8089-8c15cde335af", "address": "fa:16:3e:cc:cc:48", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14628f58-eb", "ovs_interfaceid": "14628f58-ebd5-4e11-8089-8c15cde335af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.086724] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e 
tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.179318] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948056, 'name': PowerOffVM_Task, 'duration_secs': 0.175743} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.179662] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2011.179777] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2011.180081] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6966272-8f34-4b38-818e-dd802d2c4d61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.256716] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2011.257039] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2011.257758] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] f269844b-a9b4-40a2-8ba4-a62ee59b4e40 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2011.257758] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d24fa81e-3dc1-43e3-a25d-4a0ff316eb2b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.264469] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2011.264469] env[62405]: value = "task-1948060" [ 2011.264469] env[62405]: _type = "Task" [ 2011.264469] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.272648] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.326583] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.326855] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.489740] env[62405]: DEBUG oslo_vmware.api [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Task: {'id': task-1948058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145924} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.490009] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2011.490210] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2011.490387] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2011.490564] env[62405]: INFO nova.compute.manager [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2011.490811] env[62405]: DEBUG oslo.service.loopingcall [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.491041] env[62405]: DEBUG nova.compute.manager [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2011.491133] env[62405]: DEBUG nova.network.neutron [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2011.549973] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b36efc99-ea16-484d-b449-f67f013f967b tempest-ServerAddressesTestJSON-440723467 tempest-ServerAddressesTestJSON-440723467-project-member] Lock "989a7146-71ea-433b-86f9-b7a0f0ee91b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.181s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.586058] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.587593] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.587883] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Instance network_info: |[{"id": "14628f58-ebd5-4e11-8089-8c15cde335af", "address": "fa:16:3e:cc:cc:48", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14628f58-eb", "ovs_interfaceid": "14628f58-ebd5-4e11-8089-8c15cde335af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2011.588275] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:cc:cc:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6815237d-f565-474d-a3c0-9c675478eb00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14628f58-ebd5-4e11-8089-8c15cde335af', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2011.595520] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating folder: Project (4a78c04608454ac88ecb97b4c87a9d17). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2011.596254] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27638355-55cb-464e-b957-6e8e40873a81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.607756] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created folder: Project (4a78c04608454ac88ecb97b4c87a9d17) in parent group-v401284. [ 2011.607962] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating folder: Instances. Parent ref: group-v401574. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2011.608298] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00e7cedc-d146-47ef-b833-0666d0828d6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.617666] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created folder: Instances in parent group-v401574. [ 2011.617932] env[62405]: DEBUG oslo.service.loopingcall [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.618143] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2011.618355] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9244e5f-41a3-4998-9551-b1dfe4ca55a2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.637777] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2011.637777] env[62405]: value = "task-1948063" [ 2011.637777] env[62405]: _type = "Task" [ 2011.637777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.647313] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948063, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.776249] env[62405]: DEBUG oslo_vmware.api [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215906} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.776249] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2011.776249] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2011.776249] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2011.776249] env[62405]: INFO nova.compute.manager [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2011.776249] env[62405]: DEBUG oslo.service.loopingcall [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.776249] env[62405]: DEBUG nova.compute.manager [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2011.776249] env[62405]: DEBUG nova.network.neutron [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2011.830274] env[62405]: DEBUG nova.compute.utils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2011.873255] env[62405]: DEBUG nova.compute.manager [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Received event network-changed-14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2011.873497] env[62405]: DEBUG nova.compute.manager [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Refreshing instance network info cache due to event network-changed-14628f58-ebd5-4e11-8089-8c15cde335af. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2011.873741] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] Acquiring lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2011.873932] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] Acquired lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2011.874170] env[62405]: DEBUG nova.network.neutron [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Refreshing network info cache for port 14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2012.086441] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.147736] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948063, 'name': CreateVM_Task, 'duration_secs': 0.384544} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.147928] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2012.148685] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.148866] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.149234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2012.149530] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccf53c24-9bf5-4c5c-b8cf-b4d5540dc01f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.154314] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2012.154314] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bf72b-392c-3c5a-2b85-5bd586cdf752" [ 2012.154314] env[62405]: _type = "Task" [ 2012.154314] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.164440] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bf72b-392c-3c5a-2b85-5bd586cdf752, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.303147] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23373dc-a791-4218-81ca-7aa0487f6e76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.311041] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6f6316-ecd7-4ed9-a33a-12e53e641894 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.342520] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.345048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d70e82-e778-4b70-9c3f-ab0ca37c4e06 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.353923] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec2f3b0-5238-485c-aafe-ec8d8392286b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.369130] env[62405]: DEBUG nova.compute.provider_tree [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.432019] env[62405]: DEBUG nova.network.neutron [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.577517] env[62405]: DEBUG nova.network.neutron [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updated VIF entry in instance network info cache for port 14628f58-ebd5-4e11-8089-8c15cde335af. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2012.577863] env[62405]: DEBUG nova.network.neutron [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updating instance_info_cache with network_info: [{"id": "14628f58-ebd5-4e11-8089-8c15cde335af", "address": "fa:16:3e:cc:cc:48", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14628f58-eb", "ovs_interfaceid": "14628f58-ebd5-4e11-8089-8c15cde335af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.588831] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.665351] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527bf72b-392c-3c5a-2b85-5bd586cdf752, 'name': SearchDatastore_Task, 'duration_secs': 0.010486} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.665504] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.665673] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2012.665911] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.666069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.666261] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2012.666528] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c92f93a-af01-408c-8254-78d914d73d43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.674930] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2012.675135] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2012.675855] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68da7e10-dbe8-48f8-a5c5-e04691489968 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.681391] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2012.681391] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5285bc67-69a8-210d-4af6-d29c742e4e26" [ 2012.681391] env[62405]: _type = "Task" [ 2012.681391] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.690269] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5285bc67-69a8-210d-4af6-d29c742e4e26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.721871] env[62405]: DEBUG nova.network.neutron [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.872574] env[62405]: DEBUG nova.scheduler.client.report [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2012.933861] env[62405]: INFO nova.compute.manager [-] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Took 1.44 seconds to deallocate network for instance. [ 2013.083632] env[62405]: DEBUG oslo_concurrency.lockutils [req-66fb5754-5f7e-4e9a-96c8-c3ee1f8d7729 req-d4cf41e5-7331-4354-ab5a-5b8e8f8c381b service nova] Releasing lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.089673] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.196309] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5285bc67-69a8-210d-4af6-d29c742e4e26, 'name': SearchDatastore_Task, 'duration_secs': 0.008685} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.198023] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e488a45-6665-4511-badf-470c6ea625ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.203455] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2013.203455] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52447283-5604-9ba7-0a5e-0797c6fb423e" [ 2013.203455] env[62405]: _type = "Task" [ 2013.203455] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.213466] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52447283-5604-9ba7-0a5e-0797c6fb423e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.224098] env[62405]: INFO nova.compute.manager [-] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Took 1.45 seconds to deallocate network for instance. [ 2013.377976] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.378518] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2013.381399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.288s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.413627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.413905] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.414163] env[62405]: INFO nova.compute.manager [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Attaching volume fda23177-96bc-44b3-9d93-40d1d2c7cd79 to /dev/sdb [ 2013.439745] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.465308] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8b3f99-9c54-4977-940b-d58487579925 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.473769] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611c989d-89a0-472e-a876-780d7756621c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.490172] env[62405]: DEBUG nova.virt.block_device [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updating existing volume attachment record: 81211719-ad1b-41dd-8529-e21a3e174ab3 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2013.588629] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.718124] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52447283-5604-9ba7-0a5e-0797c6fb423e, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.718124] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.718124] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/b2eae940-22bc-4c87-842f-30fbd04eba28.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2013.718124] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5892948-969d-43bd-a3b9-e52473c16fed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.722186] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2013.722186] env[62405]: value = "task-1948065" [ 2013.722186] env[62405]: _type = "Task" [ 2013.722186] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.731356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.733019] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948065, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.814959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.814959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.815187] env[62405]: INFO nova.compute.manager [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Rebooting instance [ 2013.887267] env[62405]: DEBUG nova.compute.utils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2013.891847] env[62405]: INFO nova.compute.claims [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2013.896430] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2013.896678] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2013.938231] env[62405]: DEBUG nova.compute.manager [req-64eaf104-7014-403e-ba1e-384b61ea5200 req-f99b60aa-85dd-4e8f-b964-511dfa910bed service nova] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Received event network-vif-deleted-dba92750-bf41-4683-b71d-128391ff29d0 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2013.940362] env[62405]: DEBUG nova.compute.manager [req-64eaf104-7014-403e-ba1e-384b61ea5200 req-f99b60aa-85dd-4e8f-b964-511dfa910bed service nova] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Received event network-vif-deleted-00aa4b00-fea2-4a08-bb0e-29da525135b9 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2013.993049] env[62405]: DEBUG nova.policy [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ab5cc5829014c4ebafbf88400b22a8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ba2fba100b943a2a415ec37b9365388', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2014.098511] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.235654] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948065, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46519} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.237244] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/b2eae940-22bc-4c87-842f-30fbd04eba28.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2014.237635] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2014.238315] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79f0b208-368c-41d6-9f09-c589aa38f926 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.245052] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2014.245052] env[62405]: value = "task-1948066" [ 2014.245052] env[62405]: _type = "Task" [ 2014.245052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.254145] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948066, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.344590] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.344793] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.344972] env[62405]: DEBUG nova.network.neutron [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2014.346826] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Successfully created port: ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2014.399731] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2014.404278] env[62405]: INFO nova.compute.resource_tracker [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating resource usage from migration 49367a81-108f-4418-8e83-5976f32abae1 [ 2014.591706] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.716600] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4824c1-78ce-46d0-9b3e-2fcaea37e092 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.724598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44818c77-d3de-41c9-a3da-a24b850020ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.758596] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705a24a5-17f2-4f88-aae2-a861a28cb16f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.765992] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062589} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.771414] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2014.771414] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62c55e2-0e0d-4017-8dac-c8bed90f10fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.774330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefa0adc-38ad-467f-8742-824309834217 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.813818] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/b2eae940-22bc-4c87-842f-30fbd04eba28.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2014.815025] env[62405]: DEBUG nova.compute.provider_tree [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.816315] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6381ec6-cc24-4236-9396-337d4971ad48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.838295] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 
tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2014.838295] env[62405]: value = "task-1948067" [ 2014.838295] env[62405]: _type = "Task" [ 2014.838295] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.847750] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948067, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.092429] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.166712] env[62405]: DEBUG nova.network.neutron [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.333183] env[62405]: DEBUG nova.scheduler.client.report [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2015.348512] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a 
tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948067, 'name': ReconfigVM_Task, 'duration_secs': 0.268917} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.349242] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Reconfigured VM instance instance-0000006c to attach disk [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/b2eae940-22bc-4c87-842f-30fbd04eba28.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2015.350422] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67f5402d-4882-4464-9dab-c1b53e37b466 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.357181] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2015.357181] env[62405]: value = "task-1948068" [ 2015.357181] env[62405]: _type = "Task" [ 2015.357181] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.367303] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948068, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.417675] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2015.486679] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2015.486925] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2015.487097] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2015.487285] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2015.487434] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2015.487590] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2015.488208] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2015.488445] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2015.488641] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2015.488823] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2015.489361] env[62405]: DEBUG nova.virt.hardware [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2015.490467] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46748dfb-06f1-409e-ab1f-59ca3c4a5f3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.498468] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f9c955-721b-4b25-afc0-1086e38be02d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.594875] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.669363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.841021] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.457s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.841021] env[62405]: INFO nova.compute.manager [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Migrating [ 2015.845901] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.103s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.846347] env[62405]: DEBUG nova.objects.instance [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'pci_requests' on Instance uuid 
6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2015.869708] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948068, 'name': Rename_Task, 'duration_secs': 0.146269} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.872205] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2015.872205] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34623cee-a860-46d2-9cf7-ea5988c3e6f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.879168] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2015.879168] env[62405]: value = "task-1948070" [ 2015.879168] env[62405]: _type = "Task" [ 2015.879168] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.889159] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.097557] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.176570] env[62405]: DEBUG nova.compute.manager [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2016.178774] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3f23d8-6f48-43c7-b5e7-17c68cd22f59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.183462] env[62405]: DEBUG nova.compute.manager [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Received event network-vif-plugged-ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2016.183462] env[62405]: DEBUG oslo_concurrency.lockutils [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] Acquiring lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.183462] env[62405]: DEBUG oslo_concurrency.lockutils [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.184531] env[62405]: DEBUG oslo_concurrency.lockutils [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.184531] env[62405]: DEBUG nova.compute.manager [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] No waiting events found dispatching network-vif-plugged-ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2016.184711] env[62405]: WARNING nova.compute.manager [req-7df3d61c-315c-4848-b795-7de55a3551a4 req-8ffdb158-6155-4dec-9a6d-317dcf602ce2 service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Received unexpected event network-vif-plugged-ff462cd1-7fb0-4546-92d9-d317279b4c21 for instance with vm_state building and task_state spawning. 
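The recurring "Acquiring lock ..." / "Lock ... acquired ... waited 0.000s" / "Lock ... released ... held 0.000s" triples above (for example around the "798257f7-0590-4f82-82b0-d428cc6e6e92-events" lock) are emitted by oslo.concurrency's lockutils wrappers. A minimal illustrative sketch of that pattern follows; it is not part of this log, it assumes the oslo.concurrency package is installed, and the lock names and function names are invented for the example.

# Illustrative sketch only: the in-process lock pattern behind the
# "acquired by" / "released by" DEBUG lines above. Lock names are invented.
from oslo_concurrency import lockutils

@lockutils.synchronized('example-instance-events')
def pop_event():
    # Runs only while the named lock is held; lockutils logs the
    # acquire/release (with waited/held times) at DEBUG level.
    return None

def refresh_cache(instance_uuid):
    # Context-manager form, mirroring the 'Acquiring lock "refresh_cache-<uuid>"'
    # / 'Releasing lock' pairs seen for the instances above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass

if __name__ == '__main__':
    pop_event()
    refresh_cache('00000000-0000-0000-0000-000000000000')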
[ 2016.360368] env[62405]: DEBUG nova.objects.instance [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'numa_topology' on Instance uuid 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2016.362033] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.362033] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.362033] env[62405]: DEBUG nova.network.neutron [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2016.367333] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Successfully updated port: ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2016.389094] env[62405]: DEBUG oslo_vmware.api [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948070, 'name': PowerOnVM_Task, 'duration_secs': 0.445108} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.389470] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2016.389661] env[62405]: INFO nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Took 7.15 seconds to spawn the instance on the hypervisor. 
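Each "Invoking <Something>_Task" call above is followed by a "Waiting for the task: (returnval){...}" block and periodic "Task: {...} progress is N%." lines until the task "completed successfully"; that sequence comes from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The stand-alone sketch below only mimics that loop against a stub task object instead of a live vCenter session; StubTask and its poll method are invented for illustration and are not the oslo.vmware API.

# Simplified, stand-alone sketch of the polling behaviour behind the
# "Task: {...} progress is N%" / "completed successfully" lines above.
# StubTask is invented; the real loop lives in oslo_vmware.api.
import time

class StubTask:
    """Fake task that reports progress and finishes after a few polls."""

    def __init__(self, name):
        self.name = name
        self._progress = 0

    def poll(self):
        # Each poll advances progress; a real driver would instead read
        # the task's state/progress from vCenter.
        self._progress = min(self._progress + 34, 100)
        state = 'success' if self._progress >= 100 else 'running'
        return state, self._progress

def wait_for_task(task, interval=0.5):
    start = time.time()
    while True:
        state, progress = task.poll()
        if state == 'running':
            print("Task: %s progress is %d%%." % (task.name, progress))
            time.sleep(interval)
            continue
        if state == 'success':
            print("Task: %s completed successfully in %.3fs."
                  % (task.name, time.time() - start))
            return
        raise RuntimeError("Task %s failed" % task.name)

if __name__ == '__main__':
    wait_for_task(StubTask('PowerOnVM_Task'))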
[ 2016.389924] env[62405]: DEBUG nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2016.391210] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8b4ff6-a826-4011-9e44-89d9f88bef96 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.598020] env[62405]: DEBUG oslo_vmware.api [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948055, 'name': ReconfigVM_Task, 'duration_secs': 5.825166} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.598020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.598020] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Reconfigured VM to detach interface {{(pid=62405) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2016.864957] env[62405]: INFO nova.compute.claims [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2016.876889] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.878427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.879135] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2016.915030] env[62405]: INFO nova.compute.manager [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Took 16.89 seconds to build instance. 
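The large "Updating instance_info_cache with network_info: [...]" entries above and below all share one structure: a list of VIFs, each carrying a port id, a MAC address, and a network whose subnets list fixed IPs and any floating IPs. The short sketch below walks that structure; the example dict is trimmed to fields taken verbatim from the 798257f7-0590-4f82-82b0-d428cc6e6e92 entry in this log, and summarize_vif is an invented helper name, not Nova code.

# Sketch of walking the network_info structure shown in the cache-update
# lines in this log. summarize_vif is an invented helper name.
def summarize_vif(vif):
    fixed, floating = [], []
    for subnet in vif['network']['subnets']:
        for ip in subnet['ips']:
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return {
        'port_id': vif['id'],
        'mac': vif['address'],
        'fixed_ips': fixed,
        'floating_ips': floating,
    }

if __name__ == '__main__':
    # Values trimmed from the 798257f7-... network_info entry above.
    vif = {
        'id': 'ff462cd1-7fb0-4546-92d9-d317279b4c21',
        'address': 'fa:16:3e:d9:55:da',
        'network': {
            'id': 'c3707aa8-0488-4e47-a655-7bb7788995c0',
            'subnets': [{
                'cidr': '192.168.128.0/28',
                'ips': [{'address': '192.168.128.6',
                         'type': 'fixed',
                         'floating_ips': []}],
            }],
        },
    }
    print(summarize_vif(vif))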
[ 2017.205658] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93d6c3f-0179-4fb7-820b-067658347c18 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.219628] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Doing hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 2017.219628] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d4b69164-54e2-4d80-9aea-a533e1c7721a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.229096] env[62405]: DEBUG oslo_vmware.api [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2017.229096] env[62405]: value = "task-1948071" [ 2017.229096] env[62405]: _type = "Task" [ 2017.229096] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.238590] env[62405]: DEBUG oslo_vmware.api [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948071, 'name': ResetVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.417260] env[62405]: DEBUG oslo_concurrency.lockutils [None req-24d9b671-7d72-420c-bafa-c7761e00848a tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.418s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.432196] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2017.591625] env[62405]: DEBUG nova.network.neutron [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.604547] env[62405]: DEBUG nova.network.neutron [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Updating instance_info_cache with network_info: [{"id": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "address": "fa:16:3e:d9:55:da", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff462cd1-7f", "ovs_interfaceid": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.742123] env[62405]: DEBUG oslo_vmware.api [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948071, 'name': ResetVM_Task, 'duration_secs': 
0.092984} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.742123] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Did hard reboot of VM {{(pid=62405) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 2017.742206] env[62405]: DEBUG nova.compute.manager [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2017.743229] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13989ef-7de6-4ff8-95e4-a0e2562fed40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.060517] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2018.061946] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401577', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'name': 'volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14512ed2-9eae-4753-b83c-8c0d0d5d9432', 'attached_at': '', 'detached_at': '', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'serial': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2018.062873] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b4e549-7229-476f-a579-e471374c0df7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.082786] env[62405]: INFO nova.compute.manager [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Rescuing [ 2018.082786] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.083047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.083128] env[62405]: DEBUG nova.network.neutron [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.089894] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e59603d-0db2-4725-8e90-879dd792d2c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.096175] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.117025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.117025] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Instance network_info: |[{"id": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "address": "fa:16:3e:d9:55:da", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff462cd1-7f", "ovs_interfaceid": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2018.125534] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79/volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2018.131333] 
env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:55:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '68ec9c06-8680-4a41-abad-cddbd1f768c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff462cd1-7fb0-4546-92d9-d317279b4c21', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2018.141074] env[62405]: DEBUG oslo.service.loopingcall [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.141370] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-230ae4ba-aa62-4084-be5d-5752b2db4dfa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.162271] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2018.162742] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8077145f-7da7-45fc-beed-ef6bf4b1118f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.187121] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2018.187121] env[62405]: value = "task-1948073" [ 2018.187121] env[62405]: _type = "Task" [ 2018.187121] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.187483] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2018.187483] env[62405]: value = "task-1948072" [ 2018.187483] env[62405]: _type = "Task" [ 2018.187483] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.199236] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948073, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.205061] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948072, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.262110] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4538537c-3abe-4bcb-bcf7-3a4f2d9349bb tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.447s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.289733] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acc8845-c601-4273-b57b-2fb98109bd46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.298516] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92338b87-c502-490b-8039-cf78fdabbde4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.331766] env[62405]: DEBUG nova.compute.manager [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Received event network-changed-ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2018.331979] env[62405]: DEBUG nova.compute.manager [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Refreshing instance network info cache due to event network-changed-ff462cd1-7fb0-4546-92d9-d317279b4c21. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2018.332224] env[62405]: DEBUG oslo_concurrency.lockutils [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] Acquiring lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.332386] env[62405]: DEBUG oslo_concurrency.lockutils [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] Acquired lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.332558] env[62405]: DEBUG nova.network.neutron [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Refreshing network info cache for port ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2018.334254] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb96df3b-3565-4b77-943f-184b3ac7d7a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.343859] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c9f4f2-f4d4-4c1b-af14-e9b085585bdf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.360952] env[62405]: DEBUG nova.compute.provider_tree [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 
tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.472316] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.472908] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquired lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.475017] env[62405]: DEBUG nova.network.neutron [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.663258] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.663531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.663689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.663875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.664061] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.670890] env[62405]: INFO nova.compute.manager [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Terminating instance [ 2018.688039] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.688475] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.707880] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948073, 'name': CreateVM_Task, 'duration_secs': 0.358561} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.708206] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948072, 'name': ReconfigVM_Task, 'duration_secs': 0.410375} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.708367] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2018.708984] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79/volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2018.716327] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.716327] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.716477] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2018.717028] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87e8ea90-e961-429b-aeca-cb319c984944 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.727256] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28c2eb02-903a-479f-8fff-ffc387daae88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.733163] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2018.733163] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e21be5-afc1-f86e-cd27-4a74e2d0e7bc" [ 2018.733163] env[62405]: _type = "Task" [ 2018.733163] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.738190] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2018.738190] env[62405]: value = "task-1948074" [ 2018.738190] env[62405]: _type = "Task" [ 2018.738190] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.744806] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e21be5-afc1-f86e-cd27-4a74e2d0e7bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.750321] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948074, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.869081] env[62405]: DEBUG nova.scheduler.client.report [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2018.944270] env[62405]: DEBUG nova.network.neutron [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updating instance_info_cache with network_info: [{"id": "14628f58-ebd5-4e11-8089-8c15cde335af", "address": "fa:16:3e:cc:cc:48", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14628f58-eb", "ovs_interfaceid": "14628f58-ebd5-4e11-8089-8c15cde335af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.127447] env[62405]: DEBUG nova.network.neutron [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Updated VIF entry in instance network info cache for port ff462cd1-7fb0-4546-92d9-d317279b4c21. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2019.128290] env[62405]: DEBUG nova.network.neutron [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Updating instance_info_cache with network_info: [{"id": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "address": "fa:16:3e:d9:55:da", "network": {"id": "c3707aa8-0488-4e47-a655-7bb7788995c0", "bridge": "br-int", "label": "tempest-ServersTestJSON-2054002489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ba2fba100b943a2a415ec37b9365388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "68ec9c06-8680-4a41-abad-cddbd1f768c9", "external-id": "nsx-vlan-transportzone-883", "segmentation_id": 883, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff462cd1-7f", "ovs_interfaceid": "ff462cd1-7fb0-4546-92d9-d317279b4c21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.171898] env[62405]: DEBUG nova.compute.manager [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2019.172568] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2019.173573] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de776e78-e063-40c9-990b-8311e5053efe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.186419] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2019.190639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cbac7a3-174a-4dc8-89f9-9e84187915b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.199261] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2019.202777] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2019.202777] env[62405]: value = "task-1948075" [ 2019.202777] env[62405]: _type = "Task" [ 2019.202777] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.217537] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.253595] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e21be5-afc1-f86e-cd27-4a74e2d0e7bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010067} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.254609] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.254882] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2019.256427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.256427] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.256427] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2019.256811] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33d7db2b-22b4-4950-8d7b-b792762c866b {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.263637] env[62405]: DEBUG oslo_vmware.api [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948074, 'name': ReconfigVM_Task, 'duration_secs': 0.163713} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.267670] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401577', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'name': 'volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14512ed2-9eae-4753-b83c-8c0d0d5d9432', 'attached_at': '', 'detached_at': '', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'serial': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2019.273248] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2019.273512] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2019.274383] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ae5849-6527-49e3-a1ae-c5cae51d6a81 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.285712] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2019.285712] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a9067-9477-2674-98e5-47e929a58a11" [ 2019.285712] env[62405]: _type = "Task" [ 2019.285712] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.298644] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a9067-9477-2674-98e5-47e929a58a11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.375079] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.529s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.379075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.939s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.381150] env[62405]: DEBUG nova.objects.instance [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lazy-loading 'resources' on Instance uuid f16e3d13-6db6-4f61-b0e4-661856a9166b {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2019.429834] env[62405]: INFO nova.network.neutron [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2019.446611] env[62405]: INFO nova.network.neutron [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Port e5b357f2-b442-4514-aa4d-9234dfa04642 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 2019.446611] env[62405]: DEBUG nova.network.neutron [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [{"id": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "address": "fa:16:3e:b3:8f:fe", "network": {"id": "8e7f7222-48db-4dd5-a9e8-9a6d2b598918", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1945720715-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba9083cddcc24345b6ea5d2cbbbec5ba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3189d804-1d", "ovs_interfaceid": "3189d804-1d8d-4356-bbf0-e0bbda0a2d32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.447960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "refresh_cache-b2eae940-22bc-4c87-842f-30fbd04eba28" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.625282] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2882512e-fd8f-4b2b-b76d-f208bb36b52a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.646916] env[62405]: DEBUG oslo_concurrency.lockutils [req-3ae4545d-c1ee-4085-866b-08f813994aa0 req-a00561f5-d934-45c0-aeb5-e04e98196c5d service nova] Releasing lock "refresh_cache-798257f7-0590-4f82-82b0-d428cc6e6e92" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.647482] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2019.726506] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948075, 'name': PowerOffVM_Task, 'duration_secs': 0.251451} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.726961] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2019.727229] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2019.727514] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0785194-411d-4480-97c0-131420a9fdbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.733359] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.803224] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527a9067-9477-2674-98e5-47e929a58a11, 'name': SearchDatastore_Task, 'duration_secs': 0.016781} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.804304] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dfbf4e3-3232-4f48-bcb8-65023bc3108f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.812455] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2019.812455] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5257d5b3-1394-64fb-c054-9ca15cc6433f" [ 2019.812455] env[62405]: _type = "Task" [ 2019.812455] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.822539] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5257d5b3-1394-64fb-c054-9ca15cc6433f, 'name': SearchDatastore_Task, 'duration_secs': 0.008771} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.823412] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.823681] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 798257f7-0590-4f82-82b0-d428cc6e6e92/798257f7-0590-4f82-82b0-d428cc6e6e92.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2019.823940] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9158f34d-1067-428f-8417-7bf530851211 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.831176] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2019.831176] env[62405]: value = "task-1948077" [ 2019.831176] env[62405]: _type = "Task" [ 2019.831176] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.839141] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948077, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.892057] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2019.892451] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2019.892623] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleting the datastore file [datastore1] 15718289-5c19-4c2d-a9d8-d30ce0d63c68 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2019.893298] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a2001b7-58de-4c70-8db5-d4844e3fec33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.902438] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2019.902438] env[62405]: value = "task-1948078" [ 2019.902438] env[62405]: _type = "Task" [ 2019.902438] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.916031] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.951712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Releasing lock "refresh_cache-15718289-5c19-4c2d-a9d8-d30ce0d63c68" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.153536] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.153923] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a63c022-7180-44e0-acda-621ec435b308 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.162922] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2020.162922] env[62405]: value = "task-1948079" [ 2020.162922] env[62405]: _type = "Task" [ 2020.162922] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.177266] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.256611] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04da1106-a374-4ce8-9226-9b6de039eabf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.267930] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83ca66e-8ce1-4794-acdf-563f1d683a1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.303514] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9a36b9-d93d-49b2-80f1-f388d517908d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.312424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a3f5c6-9b2a-4744-94d0-5413e2dcdbea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.317264] env[62405]: DEBUG nova.objects.instance [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 14512ed2-9eae-4753-b83c-8c0d0d5d9432 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2020.331363] env[62405]: DEBUG nova.compute.provider_tree [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.343937] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49359} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.344451] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 798257f7-0590-4f82-82b0-d428cc6e6e92/798257f7-0590-4f82-82b0-d428cc6e6e92.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2020.344691] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2020.345387] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a37302dd-b2da-4e12-bce9-09cb4e3b8ef3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.354019] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2020.354019] env[62405]: value = "task-1948080" [ 2020.354019] env[62405]: _type = "Task" [ 2020.354019] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.363144] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948080, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.420971] env[62405]: DEBUG oslo_vmware.api [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.455719} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.421270] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2020.421459] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2020.421665] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2020.421837] env[62405]: INFO nova.compute.manager [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2020.422102] env[62405]: DEBUG oslo.service.loopingcall [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.422327] env[62405]: DEBUG nova.compute.manager [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2020.422429] env[62405]: DEBUG nova.network.neutron [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2020.457139] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a64aee83-2ad5-47b0-847b-35027ca13f3e tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "interface-15718289-5c19-4c2d-a9d8-d30ce0d63c68-e5b357f2-b442-4514-aa4d-9234dfa04642" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.453s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.673765] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948079, 'name': PowerOffVM_Task, 'duration_secs': 0.339772} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.674132] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2020.674442] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2020.821981] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b2e2eb2c-9451-40b7-9754-79d81790569f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.408s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.838298] env[62405]: DEBUG nova.scheduler.client.report [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2020.864759] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948080, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075588} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.865048] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2020.866025] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fe23dd-b3b1-4282-b664-aa853e0042e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.896360] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 798257f7-0590-4f82-82b0-d428cc6e6e92/798257f7-0590-4f82-82b0-d428cc6e6e92.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2020.896360] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-049660cc-dde2-4549-8f72-25ee1d5ded33 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.918103] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2020.918103] env[62405]: value = "task-1948081" [ 2020.918103] env[62405]: _type = "Task" [ 2020.918103] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.932106] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.998074] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2020.998421] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a795128-2196-45f3-8d4e-ea6d006127f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.007292] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2021.007292] env[62405]: value = "task-1948082" [ 2021.007292] env[62405]: _type = "Task" [ 2021.007292] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.015432] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.042183] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "46b794f6-e858-45e6-9977-98ab246482f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.042443] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.142182] env[62405]: DEBUG nova.compute.manager [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2021.142467] env[62405]: DEBUG oslo_concurrency.lockutils [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.142643] env[62405]: DEBUG oslo_concurrency.lockutils [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.143128] env[62405]: DEBUG oslo_concurrency.lockutils [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.143579] env[62405]: DEBUG nova.compute.manager [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] No waiting events found dispatching network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2021.143827] env[62405]: WARNING nova.compute.manager [req-3513da30-1ce6-43d3-811e-a12c9f29401e req-7f0385ee-7b32-4f24-803f-dd8cc606a829 service nova] [instance: 
6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received unexpected event network-vif-plugged-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba for instance with vm_state shelved_offloaded and task_state spawning. [ 2021.180936] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2021.181244] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2021.181380] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2021.181590] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2021.181758] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2021.181910] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2021.182375] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2021.182637] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2021.182838] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2021.183042] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2021.183699] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2021.189769] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3568c5fd-8896-465d-9ebc-5facc19f2dee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.211733] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2021.211733] env[62405]: value = "task-1948083" [ 2021.211733] env[62405]: _type = "Task" [ 2021.211733] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.224893] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948083, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.256482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.256482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.256629] env[62405]: DEBUG nova.network.neutron [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2021.272990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.273270] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.343691] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.965s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.346371] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.615s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.346801] env[62405]: DEBUG nova.objects.instance [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lazy-loading 'resources' on Instance uuid f269844b-a9b4-40a2-8ba4-a62ee59b4e40 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2021.380199] env[62405]: DEBUG nova.compute.manager [req-5ffc6280-79b2-4fcd-b1cb-d6a74a1783a3 req-a2470462-d1a6-4558-bbea-6adc633cb4e2 service nova] [instance: 
15718289-5c19-4c2d-a9d8-d30ce0d63c68] Received event network-vif-deleted-3189d804-1d8d-4356-bbf0-e0bbda0a2d32 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2021.380297] env[62405]: INFO nova.compute.manager [req-5ffc6280-79b2-4fcd-b1cb-d6a74a1783a3 req-a2470462-d1a6-4558-bbea-6adc633cb4e2 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Neutron deleted interface 3189d804-1d8d-4356-bbf0-e0bbda0a2d32; detaching it from the instance and deleting it from the info cache [ 2021.380562] env[62405]: DEBUG nova.network.neutron [req-5ffc6280-79b2-4fcd-b1cb-d6a74a1783a3 req-a2470462-d1a6-4558-bbea-6adc633cb4e2 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.382242] env[62405]: INFO nova.scheduler.client.report [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Deleted allocations for instance f16e3d13-6db6-4f61-b0e4-661856a9166b [ 2021.428539] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948081, 'name': ReconfigVM_Task, 'duration_secs': 0.306541} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.428666] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 798257f7-0590-4f82-82b0-d428cc6e6e92/798257f7-0590-4f82-82b0-d428cc6e6e92.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2021.429358] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dadadbe8-fbd7-42a3-9c61-10663c036afe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.436517] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2021.436517] env[62405]: value = "task-1948084" [ 2021.436517] env[62405]: _type = "Task" [ 2021.436517] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.444226] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948084, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.518969] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948082, 'name': PowerOffVM_Task, 'duration_secs': 0.17322} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.519322] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.520191] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1743292-e81c-4a22-a6c2-c67fa3439090 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.543558] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aae140-0377-4bb0-91eb-5bd2aaa64ce0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.547689] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2021.587967] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.588293] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41a65047-97a8-4302-81e7-350b4c5e556c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.599232] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2021.599232] env[62405]: value = "task-1948085" [ 2021.599232] env[62405]: _type = "Task" [ 2021.599232] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.609023] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2021.609023] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2021.609116] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.609243] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.609428] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2021.609696] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a23bdc47-2447-42d4-9b9f-b00b5ecc1e22 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.618190] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2021.618372] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2021.619068] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a59d92e7-c488-4080-8f9c-71bea1eee055 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.627012] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2021.627012] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb8ac9-a23e-1868-ccce-cb9323eed59f" [ 2021.627012] env[62405]: _type = "Task" [ 2021.627012] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.635898] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb8ac9-a23e-1868-ccce-cb9323eed59f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.721968] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948083, 'name': ReconfigVM_Task, 'duration_secs': 0.444644} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.722433] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2021.775803] env[62405]: INFO nova.compute.manager [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Detaching volume fda23177-96bc-44b3-9d93-40d1d2c7cd79 [ 2021.812109] env[62405]: INFO nova.virt.block_device [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Attempting to driver detach volume fda23177-96bc-44b3-9d93-40d1d2c7cd79 from mountpoint /dev/sdb [ 2021.812351] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2021.812540] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401577', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'name': 'volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14512ed2-9eae-4753-b83c-8c0d0d5d9432', 'attached_at': '', 'detached_at': '', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'serial': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2021.813802] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74482a4c-1272-43b8-886e-2b3a680fa0d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.834687] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623c9e0e-8bd5-48fd-9f10-f178e96fe5fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.841662] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6022dde-8c1d-4173-9897-6c0ff313939b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.863442] env[62405]: DEBUG nova.network.neutron [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.865327] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e66c74a-7345-4cb4-8abe-389f8f20b8f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.883298] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] The volume has not been displaced from its original location: [datastore1] volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79/volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2021.888990] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2021.894125] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00df4ee6-2b1f-43ae-9f31-12920037ab98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.907640] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dbe0dbbf-45a4-4bd8-8208-629704291c1f tempest-ServersNegativeTestJSON-1262554471 tempest-ServersNegativeTestJSON-1262554471-project-member] Lock "f16e3d13-6db6-4f61-b0e4-661856a9166b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.056s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.908874] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f381ea82-2eb2-4b8c-8550-807f14e03268 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.918946] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02764517-8e7a-4039-b776-c90fde85b8b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.937950] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2021.937950] env[62405]: value = "task-1948086" [ 2021.937950] env[62405]: _type = "Task" [ 2021.937950] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.969083] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.969485] env[62405]: DEBUG nova.compute.manager [req-5ffc6280-79b2-4fcd-b1cb-d6a74a1783a3 req-a2470462-d1a6-4558-bbea-6adc633cb4e2 service nova] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Detach interface failed, port_id=3189d804-1d8d-4356-bbf0-e0bbda0a2d32, reason: Instance 15718289-5c19-4c2d-a9d8-d30ce0d63c68 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2021.969948] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948084, 'name': Rename_Task, 'duration_secs': 0.138933} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.972883] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2021.973150] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a88f640f-608d-4ff7-81dd-dd44898404b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.982848] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2021.982848] env[62405]: value = "task-1948087" [ 2021.982848] env[62405]: _type = "Task" [ 2021.982848] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.993913] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.070906] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.133230] env[62405]: DEBUG nova.network.neutron [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2022.137916] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eb8ac9-a23e-1868-ccce-cb9323eed59f, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.141472] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba4bf57-1d83-4151-b105-737c280abeb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.147041] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2022.147041] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52770dc6-f582-1797-c4be-a890a804b354" [ 2022.147041] env[62405]: _type = "Task" [ 2022.147041] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.158155] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52770dc6-f582-1797-c4be-a890a804b354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.201328] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5243a510-2fc9-406b-a4be-c4919d6b3c6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.208937] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931ef3f1-a7ba-47ff-8ada-cf92a143d4b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.244910] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.246343] 
env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2022.246343] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2022.246640] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2022.246640] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2022.246777] env[62405]: DEBUG nova.virt.hardware [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2022.252429] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2022.253037] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-eb62b229-2631-4c8e-b190-56b868713941 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.266855] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdc0676-76a1-42d0-959c-a3b4453cbf9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.276131] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9209c78-6e74-41a7-b903-aecf947bc434 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.283602] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2022.283602] env[62405]: value = "task-1948088" [ 2022.283602] env[62405]: _type = "Task" [ 2022.283602] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.295890] env[62405]: DEBUG nova.compute.provider_tree [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.300828] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948088, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.370825] env[62405]: INFO nova.compute.manager [-] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Took 1.95 seconds to deallocate network for instance. [ 2022.458338] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948086, 'name': ReconfigVM_Task, 'duration_secs': 0.340808} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.458724] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2022.464274] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-746d56da-9390-46dc-9823-66ae78f66af0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.480875] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2022.480875] env[62405]: value = "task-1948089" [ 2022.480875] env[62405]: _type = "Task" [ 2022.480875] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.495448] env[62405]: DEBUG oslo_vmware.api [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948087, 'name': PowerOnVM_Task, 'duration_secs': 0.452264} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.499285] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2022.499725] env[62405]: INFO nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Took 7.08 seconds to spawn the instance on the hypervisor. [ 2022.499725] env[62405]: DEBUG nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2022.499954] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948089, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.500721] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27199797-f1cf-4f96-9947-666fdf0e5106 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.639786] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.657389] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52770dc6-f582-1797-c4be-a890a804b354, 'name': SearchDatastore_Task, 'duration_secs': 0.011686} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.658565] env[62405]: DEBUG oslo_concurrency.lockutils [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.658565] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2022.658565] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f92a91b-4b86-4340-971d-358b76f0fa1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.666905] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2022.666905] env[62405]: value = "task-1948090" [ 2022.666905] env[62405]: _type = "Task" [ 2022.666905] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.672673] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e43fda5d4da7e8d144b83fe9d5220a92',container_format='bare',created_at=2024-12-21T03:29:37Z,direct_url=,disk_format='vmdk',id=ca187b4d-a52b-4628-a4f9-f6cf89613d47,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1380426529-shelved',owner='28cfe90f16b140018a5802c02f751d9c',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-12-21T03:29:54Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2022.672913] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2022.673139] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
2022.673342] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2022.673555] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2022.673641] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2022.673851] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2022.674024] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2022.674268] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2022.674457] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2022.674636] env[62405]: DEBUG nova.virt.hardware [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2022.675428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7684bf79-1dff-4eae-b334-1db7caf9dfcf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.683073] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948090, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.686279] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1e7213-9fef-453a-8977-35f4be801e1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.707265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:09:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e84f02c8-cde2-4f59-88cd-ef80e8cc1bba', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2022.714676] env[62405]: DEBUG oslo.service.loopingcall [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2022.714899] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2022.715126] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e381621-dbd2-479e-834f-1b3c9c6707fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.734906] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2022.734906] env[62405]: value = "task-1948091" [ 2022.734906] env[62405]: _type = "Task" [ 2022.734906] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.744185] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948091, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.792319] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948088, 'name': ReconfigVM_Task, 'duration_secs': 0.193921} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.792970] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2022.793680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70229faa-53ee-4066-814d-52b3a95d91d0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.813904] env[62405]: DEBUG nova.scheduler.client.report [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2022.825687] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.826532] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59298a75-116d-4b7b-818c-92d54a010449 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.846170] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2022.846170] env[62405]: value = "task-1948092" [ 2022.846170] env[62405]: _type = "Task" [ 2022.846170] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.857262] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948092, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.882914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.991649] env[62405]: DEBUG oslo_vmware.api [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948089, 'name': ReconfigVM_Task, 'duration_secs': 0.222054} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.992151] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401577', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'name': 'volume-fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14512ed2-9eae-4753-b83c-8c0d0d5d9432', 'attached_at': '', 'detached_at': '', 'volume_id': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79', 'serial': 'fda23177-96bc-44b3-9d93-40d1d2c7cd79'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2023.019922] env[62405]: INFO nova.compute.manager [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Took 14.74 seconds to build instance. [ 2023.180410] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948090, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.253125] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948091, 'name': CreateVM_Task, 'duration_secs': 0.443575} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.253125] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2023.257643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.257643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.257643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2023.257643] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c44d17d0-791b-4485-ade5-1be63616d3dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.260038] env[62405]: DEBUG nova.compute.manager [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2023.260256] env[62405]: DEBUG nova.compute.manager [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing instance network info cache due to event network-changed-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2023.260664] env[62405]: DEBUG oslo_concurrency.lockutils [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] Acquiring lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.260664] env[62405]: DEBUG oslo_concurrency.lockutils [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] Acquired lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.260885] env[62405]: DEBUG nova.network.neutron [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Refreshing network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2023.270575] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2023.270575] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ec775f-14d7-a625-9eec-48f3a58842b5" [ 2023.270575] env[62405]: _type = "Task" [ 2023.270575] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.284834] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ec775f-14d7-a625-9eec-48f3a58842b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.328171] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.331409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.598s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.333074] env[62405]: INFO nova.compute.claims [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2023.356882] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948092, 'name': ReconfigVM_Task, 'duration_secs': 0.509329} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.357856] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.357856] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2023.363995] env[62405]: INFO nova.scheduler.client.report [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted allocations for instance f269844b-a9b4-40a2-8ba4-a62ee59b4e40 [ 2023.522591] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c2adba3b-ce85-4e8e-9483-bd2ae6ec5645 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.258s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.538840] env[62405]: DEBUG nova.objects.instance [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 14512ed2-9eae-4753-b83c-8c0d0d5d9432 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2023.678934] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630938} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.679271] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. 
[ 2023.680116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9d48b7-3bc9-47c1-948e-5ad0023e7893 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.706988] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2023.707313] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12139d1e-3726-4f01-8796-36c933f505ef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.725036] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2023.725036] env[62405]: value = "task-1948093" [ 2023.725036] env[62405]: _type = "Task" [ 2023.725036] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.733022] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948093, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.780907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.781990] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Processing image ca187b4d-a52b-4628-a4f9-f6cf89613d47 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2023.781990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.781990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.782277] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2023.782622] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5797a03-a7d8-467d-9daa-af7ea65a1dfb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.791596] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2023.791842] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2023.792690] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7100bd71-c16d-4f5b-a50d-a4ee2f595775 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.799088] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2023.799088] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f557e7-1011-6eac-2d49-81754f83b389" [ 2023.799088] env[62405]: _type = "Task" [ 2023.799088] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.812776] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f557e7-1011-6eac-2d49-81754f83b389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.866245] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c648a13a-348d-4c44-88f0-f31fc0bfab27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.872418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ba0f5f27-3e82-47b1-ad5c-60baefdce733 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "f269844b-a9b4-40a2-8ba4-a62ee59b4e40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.726s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.898282] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0b90b8-702f-4a55-8bbf-54f3f0a234bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.921104] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2023.932743] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "798257f7-0590-4f82-82b0-d428cc6e6e92" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.932985] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s 
{{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.933183] env[62405]: DEBUG nova.compute.manager [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2023.934041] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffffc355-812b-4dc9-b0ac-96e0ca6b7218 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.941313] env[62405]: DEBUG nova.compute.manager [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2023.941882] env[62405]: DEBUG nova.objects.instance [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'flavor' on Instance uuid 798257f7-0590-4f82-82b0-d428cc6e6e92 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2024.045458] env[62405]: DEBUG nova.network.neutron [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updated VIF entry in instance network info cache for port e84f02c8-cde2-4f59-88cd-ef80e8cc1bba. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2024.045814] env[62405]: DEBUG nova.network.neutron [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [{"id": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "address": "fa:16:3e:f1:09:bd", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f02c8-cd", "ovs_interfaceid": "e84f02c8-cde2-4f59-88cd-ef80e8cc1bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2024.235602] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948093, 'name': ReconfigVM_Task, 'duration_secs': 0.296186} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.235915] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Reconfigured VM instance instance-0000006c to attach disk [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2024.236800] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18d3ef7-8577-4174-8a37-80c09ab9d7a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.263953] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f5e0198-cbf8-4673-93e6-ffa85c2cc5f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.280333] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2024.280333] env[62405]: value = "task-1948094" [ 2024.280333] env[62405]: _type = "Task" [ 2024.280333] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.288323] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948094, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.308766] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2024.309063] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Fetch image to [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254/OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2024.309308] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Downloading stream optimized image ca187b4d-a52b-4628-a4f9-f6cf89613d47 to [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254/OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254.vmdk on the data store datastore1 as vApp {{(pid=62405) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2024.309491] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Downloading image file data ca187b4d-a52b-4628-a4f9-f6cf89613d47 to the ESX as VM named 'OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254' {{(pid=62405) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2024.387311] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2024.387311] env[62405]: value = "resgroup-9" [ 2024.387311] env[62405]: _type = "ResourcePool" [ 2024.387311] env[62405]: }. 
{{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2024.387592] env[62405]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e1226e12-34a0-4e14-969b-75662a8d69e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.411380] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease: (returnval){ [ 2024.411380] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2024.411380] env[62405]: _type = "HttpNfcLease" [ 2024.411380] env[62405]: } obtained for vApp import into resource pool (val){ [ 2024.411380] env[62405]: value = "resgroup-9" [ 2024.411380] env[62405]: _type = "ResourcePool" [ 2024.411380] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2024.411653] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the lease: (returnval){ [ 2024.411653] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2024.411653] env[62405]: _type = "HttpNfcLease" [ 2024.411653] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2024.418926] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2024.418926] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2024.418926] env[62405]: _type = "HttpNfcLease" [ 2024.418926] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2024.548407] env[62405]: DEBUG oslo_concurrency.lockutils [None req-97008eaa-deb8-43cb-a8a1-cecba68fbb3f tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.549732] env[62405]: DEBUG oslo_concurrency.lockutils [req-bdd0981e-0687-421d-93a5-2b7defe9c299 req-78fcf39f-689b-497e-b445-20e110dbb9ec service nova] Releasing lock "refresh_cache-6fcfada3-d73a-4814-bf45-d34b26d76d4a" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.596038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09e01f8-084d-4cb1-aa58-6d5a3dbd4ce6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.606040] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86719af7-228a-4937-bfc1-6f8db39c571e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.634851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.635363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.635363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.635531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.635705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.637944] env[62405]: INFO nova.compute.manager [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Terminating instance [ 2024.639853] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d883f48-b875-4d10-a473-ef8612082a8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.649135] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d840cf-2eb1-43c5-a19b-f1bc7c0cea76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.664879] env[62405]: DEBUG nova.compute.provider_tree [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.790176] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948094, 'name': ReconfigVM_Task, 'duration_secs': 0.168545} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.790473] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2024.790726] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09cb190a-9b3b-4214-a646-32a6cbe8f201 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.797802] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2024.797802] env[62405]: value = "task-1948096" [ 2024.797802] env[62405]: _type = "Task" [ 2024.797802] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.805558] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948096, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.921157] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2024.921157] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2024.921157] env[62405]: _type = "HttpNfcLease" [ 2024.921157] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2024.948916] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2024.949379] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84a7d105-5839-4280-8b9d-9741200b2f32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.958225] env[62405]: DEBUG oslo_vmware.api [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2024.958225] env[62405]: value = "task-1948097" [ 2024.958225] env[62405]: _type = "Task" [ 2024.958225] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.966633] env[62405]: DEBUG oslo_vmware.api [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948097, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.145077] env[62405]: DEBUG nova.compute.manager [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2025.145370] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.146501] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271e8372-8b88-464b-ab8c-cfc57e1c36d1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.154465] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.154732] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2aa26f81-7d5c-4f14-a436-e1c88341194a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.162278] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2025.162278] env[62405]: value = "task-1948098" [ 2025.162278] env[62405]: _type = "Task" [ 2025.162278] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.168021] env[62405]: DEBUG nova.scheduler.client.report [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2025.176229] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.308779] env[62405]: DEBUG oslo_vmware.api [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948096, 'name': PowerOnVM_Task, 'duration_secs': 0.405503} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.309083] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2025.311876] env[62405]: DEBUG nova.compute.manager [None req-f65dd5e5-33d0-46b0-8885-4c7706c30b65 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2025.312708] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23da40ed-8f20-4da7-ac6e-a6e3a40a12e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.421947] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2025.421947] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2025.421947] env[62405]: _type = "HttpNfcLease" [ 2025.421947] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2025.422279] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2025.422279] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5216a488-dceb-7403-9ae3-3aa2664d7dbb" [ 2025.422279] env[62405]: _type = "HttpNfcLease" [ 2025.422279] env[62405]: }. 
{{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2025.423006] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71680cc7-bab1-4e22-b852-6e22869fb5bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.430511] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2025.430674] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk. {{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2025.496982] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-50a6f160-b1ba-4f3f-9bff-52fbbeb9b17d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.499112] env[62405]: DEBUG oslo_vmware.api [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948097, 'name': PowerOffVM_Task, 'duration_secs': 0.180829} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.500452] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.500671] env[62405]: DEBUG nova.compute.manager [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2025.501758] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bc0edd-69a9-45ee-b260-47ec6c044353 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.578566] env[62405]: DEBUG nova.network.neutron [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Port 2026016a-87b1-42ae-a04f-d95c5fb37377 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2025.646912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.646912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.646912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.646912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.646912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock 
"14512ed2-9eae-4753-b83c-8c0d0d5d9432-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.650029] env[62405]: INFO nova.compute.manager [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Terminating instance [ 2025.674331] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.675773] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948098, 'name': PowerOffVM_Task, 'duration_secs': 0.226299} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.675773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.605s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.677120] env[62405]: INFO nova.compute.claims [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2025.681359] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.681359] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.682554] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c68777a-ca5c-45fe-83b0-86e767ba2e6a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.014018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5e508443-821d-4fe6-bea8-870efd396144 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.081s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.154862] env[62405]: DEBUG 
nova.compute.manager [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2026.155150] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2026.156108] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfb22b8-ef5a-475f-8ecf-4f22f6fd98ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.166527] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2026.168227] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f228aafb-bb9b-44e3-a8e6-6a7d1635ee38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.176542] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2026.176542] env[62405]: value = "task-1948100" [ 2026.176542] env[62405]: _type = "Task" [ 2026.176542] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.182378] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "407b40db-aa6e-4909-8c7a-3b2d69c59330" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.182634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "407b40db-aa6e-4909-8c7a-3b2d69c59330" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.192008] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948100, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.604773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.605576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.605576] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.689494] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "407b40db-aa6e-4909-8c7a-3b2d69c59330" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.506s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.689805] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2026.692398] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948100, 'name': PowerOffVM_Task, 'duration_secs': 0.216435} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.695584] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2026.695584] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2026.698701] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adc2f765-e76e-4ff0-8f15-29a38bd56a95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.710761] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2026.710761] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2026.711433] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b515181-0083-46b9-bdba-e268b9506e44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.718490] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2026.718733] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2026.719053] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-fd7bf38e-b4fb-471c-8e4a-f6aa4bfde6e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.793527] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "58691f22-5acd-45db-b587-df784a000813" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.793769] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.847395] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "798257f7-0590-4f82-82b0-d428cc6e6e92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.847634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.847835] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.848030] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.848209] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.850506] env[62405]: INFO nova.compute.manager [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Terminating instance [ 2026.962856] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf32e812-58ac-4d29-b2d6-66fb9267739c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.970306] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bf0ad4-4015-4574-84b4-5d27d45e5980 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.001483] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c48a10b-8da5-4317-b306-e170024f5420 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.004297] env[62405]: DEBUG oslo_vmware.rw_handles [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d0320f-6b74-cda3-b56f-ac923220e3ae/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2027.004516] env[62405]: INFO nova.virt.vmwareapi.images [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Downloaded image file data ca187b4d-a52b-4628-a4f9-f6cf89613d47 [ 2027.005247] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3572ee-e0af-4373-afaf-5cfba523cd56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.022271] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dae11c-75fb-4248-931f-9be6e57be8d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.026111] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-680a3f02-c8bb-4d19-98b9-edbb9b96f757 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.036260] env[62405]: DEBUG nova.compute.provider_tree [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.045635] env[62405]: INFO nova.virt.vmwareapi.images [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] The imported VM was unregistered [ 2027.048114] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 
tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2027.048344] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2027.049130] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4a45a41-ca01-4c72-b6e5-8755020806fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.058760] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47 {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2027.058946] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254/OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254.vmdk to [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk. {{(pid=62405) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2027.059196] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-73606125-8d2b-4ac1-a478-dda9fd4b2ca6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.065151] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2027.065151] env[62405]: value = "task-1948103" [ 2027.065151] env[62405]: _type = "Task" [ 2027.065151] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.073926] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.200640] env[62405]: DEBUG nova.compute.utils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2027.202291] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2027.202517] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2027.296361] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2027.303108] env[62405]: DEBUG nova.policy [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be503600ca0b40bdacc8c8767b919e1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb2a0fcacd6247ec811b95f61c752fb8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2027.355032] env[62405]: DEBUG nova.compute.manager [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2027.355327] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2027.356638] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea29813-e07b-44ea-82cf-c462d911975b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.365137] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2027.365451] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edcd7ad3-bac6-4d31-8554-bb28b84e47d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.539547] env[62405]: DEBUG nova.scheduler.client.report [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2027.576862] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.653789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2027.654062] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2027.654337] env[62405]: DEBUG nova.network.neutron [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2027.706238] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2027.771948] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Successfully created port: 6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.829808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.045552] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.046097] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2028.050187] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.167s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.050187] env[62405]: DEBUG nova.objects.instance [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'resources' on Instance uuid 15718289-5c19-4c2d-a9d8-d30ce0d63c68 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.077782] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.359026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.359026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.359026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.359026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.359026] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2028.360528] env[62405]: INFO nova.compute.manager [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Terminating instance [ 2028.410060] env[62405]: DEBUG nova.network.neutron [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.552897] env[62405]: DEBUG nova.compute.utils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2028.559233] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2028.559233] env[62405]: DEBUG nova.network.neutron [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2028.578142] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.602304] env[62405]: DEBUG nova.policy [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4ac1534df994c18bad62ec85acbc69f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a423f493034065bb1591d14d215ed8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2028.719995] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2028.753317] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2028.753574] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.753735] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2028.754013] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.754106] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2028.754253] env[62405]: DEBUG nova.virt.hardware [None 
req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2028.754745] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2028.754820] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2028.755099] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2028.755402] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2028.755748] env[62405]: DEBUG nova.virt.hardware [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2028.756698] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5ccb63-6ae0-4c0b-8b71-887a97b6817e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.769377] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbaedd5-ebcb-49b6-8c30-6eb51c3501bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.843015] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfe7cae-1f9e-499e-aaa0-be6cddd5c9fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.853542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3d436a-caef-451c-9392-641faf1e57f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.896971] env[62405]: DEBUG nova.compute.manager [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2028.897782] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2028.902311] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2cd3b3-c65f-4ce5-b79d-ce8912d1c5b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.905089] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65354338-3ef5-4375-9176-243f55d4edd0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.914031] env[62405]: DEBUG oslo_concurrency.lockutils [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.917669] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2028.920108] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0e76254-d067-41a5-8438-afd74f9d9066 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.923822] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1113cc07-b8a2-4287-a9fd-563b0f5b6107 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.932371] env[62405]: DEBUG nova.network.neutron [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Successfully created port: 1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2028.943210] env[62405]: DEBUG nova.compute.provider_tree [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.945889] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2028.945889] env[62405]: value = "task-1948105" [ 2028.945889] env[62405]: _type = "Task" [ 2028.945889] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.956159] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.058230] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2029.076305] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.430024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69d5644-71ed-4de6-add3-36dcf6316717 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.436024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ec4540-8828-4a72-b943-4400e6ee7902 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.449142] env[62405]: DEBUG nova.scheduler.client.report [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2029.462257] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948105, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.576150] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948103, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.370492} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.576417] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254/OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254.vmdk to [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk. [ 2029.576607] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Cleaning up location [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2029.576815] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_358dec73-c3ca-4a71-9ae8-696bc7dff254 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2029.577094] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7e501b5-c2a3-44dc-a96d-b5fc7dd2148e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.582698] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2029.582698] env[62405]: value = "task-1948106" [ 2029.582698] env[62405]: _type = "Task" [ 2029.582698] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.591048] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.957253] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.959334] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.130s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.960923] env[62405]: INFO nova.compute.claims [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2029.969028] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948105, 'name': PowerOffVM_Task, 'duration_secs': 0.536087} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.969261] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2029.969464] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2029.969734] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ce21b10-ea94-4772-9f8a-a88adbfc9291 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.985844] env[62405]: INFO nova.scheduler.client.report [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted allocations for instance 15718289-5c19-4c2d-a9d8-d30ce0d63c68 [ 2030.068086] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2030.094802] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034763} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.096816] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2030.097056] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2030.097229] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2030.097431] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2030.097588] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2030.097739] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2030.097947] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 2030.098122] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2030.098291] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2030.098454] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2030.098628] env[62405]: DEBUG nova.virt.hardware [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2030.098902] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2030.099074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.099331] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk to [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2030.100324] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad85c2fb-6af0-4f71-bd57-f6d7588c0b52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.102662] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dc3841e-2050-4dfc-b8af-c1d3d3c05ea3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.109172] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0489ab-a4a6-4028-b7c6-2eb21972bcf1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.113990] env[62405]: 
DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2030.113990] env[62405]: value = "task-1948108" [ 2030.113990] env[62405]: _type = "Task" [ 2030.113990] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.129449] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.493472] env[62405]: DEBUG oslo_concurrency.lockutils [None req-59294620-f26d-422e-a81c-03234b10992d tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "15718289-5c19-4c2d-a9d8-d30ce0d63c68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.830s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.535422] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dcb4ee-6645-4125-99bc-ef8d5542d9af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.557039] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f90f947-9ef3-45b9-a7f4-5c0056549e56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.564090] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2030.624644] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.073086] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2031.073549] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.073814] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.073982] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.074206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.074348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.078653] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-372aa635-0606-4ffe-a38d-e08340be8e28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.080818] env[62405]: INFO nova.compute.manager [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Terminating instance [ 2031.089563] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ 
[ 2031.089563] env[62405]: value = "task-1948109" [ 2031.089563] env[62405]: _type = "Task" [ 2031.089563] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.101499] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948109, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.129528] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.221990] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eac9ac-e962-465f-bc1e-a7fd8f72362e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.232328] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65954a75-f2e1-410c-85c3-e850b3d8f39a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.263452] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6aab12-8328-4303-b34c-ec675783b922 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.271489] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f500fcc8-18af-4b88-bdca-2d4e6fd3347a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.285258] env[62405]: DEBUG nova.compute.provider_tree [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.584858] env[62405]: DEBUG nova.compute.manager [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2031.585165] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2031.586106] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e41364-12c9-40da-83b3-9d3f4db7bad9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.595375] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2031.596340] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42f92339-5db1-4417-b1dd-fbf36cf108fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.602660] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.603984] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2031.603984] env[62405]: value = "task-1948110" [ 2031.603984] env[62405]: _type = "Task" [ 2031.603984] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.613029] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.625126] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.788944] env[62405]: DEBUG nova.scheduler.client.report [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2032.102509] env[62405]: DEBUG oslo_vmware.api [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948109, 'name': PowerOnVM_Task, 'duration_secs': 0.886716} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.102847] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2032.102997] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-15599116-f656-4d9e-b029-d73ca7b26be0 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance 'c39d9059-8da4-4c8d-99ab-d66b8445e7da' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2032.115100] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948110, 'name': PowerOffVM_Task, 'duration_secs': 0.196936} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.115384] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2032.115496] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2032.115764] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-902a8ed3-10f0-4ce6-ad1d-276ca245dd88 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.127853] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.141907] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2032.142175] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2032.142379] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleting the datastore file [datastore1] d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2032.142676] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fa552b3-fa0e-4033-9cf8-151e035a6cfd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.150073] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2032.150314] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2032.150507] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleting the datastore file [datastore1] 14512ed2-9eae-4753-b83c-8c0d0d5d9432 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2032.150768] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59fcebda-61e9-41b9-a9c2-46d9e3a249ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.155541] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2032.155541] env[62405]: value = "task-1948112" [ 2032.155541] env[62405]: _type = "Task" [ 2032.155541] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.160749] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2032.160749] env[62405]: value = "task-1948113" [ 2032.160749] env[62405]: _type = "Task" [ 2032.160749] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.170879] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.174818] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948112, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.175018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2032.175220] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2032.175396] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] 41e5385d-f0c7-4431-8424-e60dbeebaf8e {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2032.175649] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0cab85f-239f-4205-8fe9-27e5a4947c80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.186693] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2032.186693] env[62405]: value = "task-1948114" [ 2032.186693] env[62405]: _type = "Task" [ 2032.186693] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.196354] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2032.196655] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2032.196890] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] 798257f7-0590-4f82-82b0-d428cc6e6e92 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2032.197249] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-124e174d-7268-413b-88c9-05c2165005f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.202833] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.210638] env[62405]: DEBUG oslo_vmware.api [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2032.210638] env[62405]: value = "task-1948115" [ 2032.210638] env[62405]: _type = "Task" [ 2032.210638] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.221163] env[62405]: DEBUG oslo_vmware.api [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.242821] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2032.243099] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2032.243270] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleting the datastore file [datastore1] 556e1bca-f2f1-4200-96df-997d48ce5a15 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2032.243552] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23f395f4-7f3d-486f-84f5-a40cdd969392 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.250770] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for the task: (returnval){ [ 2032.250770] env[62405]: value = "task-1948116" [ 2032.250770] env[62405]: _type = "Task" [ 2032.250770] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.259072] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948116, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.293960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.294554] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2032.630121] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.671313] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.674706] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948113, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.698055] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.720467] env[62405]: DEBUG oslo_vmware.api [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948115, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.762082] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948116, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.799902] env[62405]: DEBUG nova.compute.utils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2032.805023] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2032.805023] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2032.883085] env[62405]: DEBUG nova.policy [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9a2f1473c194340a88b2a94b70eb754', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a78c04608454ac88ecb97b4c87a9d17', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2033.131436] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948108, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.89383} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.131436] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ca187b4d-a52b-4628-a4f9-f6cf89613d47/ca187b4d-a52b-4628-a4f9-f6cf89613d47.vmdk to [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2033.132286] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa19740-8076-4246-93fe-241da4a7a6ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.156675] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2033.157304] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c16536d3-3526-4097-b794-3272505482e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.190297] env[62405]: DEBUG oslo_vmware.api [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.93649} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.196731] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.197045] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.197308] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.197552] env[62405]: INFO nova.compute.manager [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Took 7.04 seconds to destroy the instance on the hypervisor. 
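[editor's note] The DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries above all follow the same submit-then-poll pattern: a task is invoked via the vSphere API, then oslo.vmware polls it ("progress is 0%", "progress is 85%", "completed successfully") until it reaches a terminal state. The following is only an illustrative sketch of that pattern, not the oslo.vmware source; get_task_info and the state strings are hypothetical stand-ins for the real task-info query.

import time

def wait_for_vsphere_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state."""
    while True:
        info = get_task_info()  # hypothetical callable, e.g. returns {'state': 'running', 'progress': 40}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # each iteration corresponds to one "progress is N%" DEBUG line in the log
        time.sleep(poll_interval)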
[ 2033.197860] env[62405]: DEBUG oslo.service.loopingcall [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.198211] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2033.198211] env[62405]: value = "task-1948117" [ 2033.198211] env[62405]: _type = "Task" [ 2033.198211] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.198527] env[62405]: DEBUG oslo_vmware.api [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.965737} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.198798] env[62405]: DEBUG nova.compute.manager [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2033.198956] env[62405]: DEBUG nova.network.neutron [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2033.200860] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.201135] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.201380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.201782] env[62405]: INFO nova.compute.manager [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Took 4.30 seconds to destroy the instance on the hypervisor. [ 2033.202078] env[62405]: DEBUG oslo.service.loopingcall [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.205849] env[62405]: DEBUG nova.compute.manager [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2033.206098] env[62405]: DEBUG nova.network.neutron [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2033.211296] env[62405]: DEBUG oslo_vmware.api [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.914624} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.212243] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.212547] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.212829] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.213183] env[62405]: INFO nova.compute.manager [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Took 8.07 seconds to destroy the instance on the hypervisor. [ 2033.213949] env[62405]: DEBUG oslo.service.loopingcall [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.217798] env[62405]: DEBUG nova.compute.manager [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2033.217960] env[62405]: DEBUG nova.network.neutron [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2033.220260] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948117, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.230245] env[62405]: DEBUG oslo_vmware.api [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948115, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.878799} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.230532] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.230756] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.230968] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.231219] env[62405]: INFO nova.compute.manager [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Took 5.88 seconds to destroy the instance on the hypervisor. [ 2033.231499] env[62405]: DEBUG oslo.service.loopingcall [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.231763] env[62405]: DEBUG nova.compute.manager [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2033.231867] env[62405]: DEBUG nova.network.neutron [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2033.236998] env[62405]: DEBUG nova.compute.manager [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received event network-vif-plugged-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2033.236998] env[62405]: DEBUG oslo_concurrency.lockutils [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] Acquiring lock "46b794f6-e858-45e6-9977-98ab246482f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.236998] env[62405]: DEBUG oslo_concurrency.lockutils [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] Lock "46b794f6-e858-45e6-9977-98ab246482f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.237198] env[62405]: DEBUG oslo_concurrency.lockutils [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] Lock "46b794f6-e858-45e6-9977-98ab246482f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.237451] env[62405]: DEBUG nova.compute.manager [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] No waiting events found dispatching network-vif-plugged-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2033.237655] env[62405]: WARNING nova.compute.manager [req-2750821e-dd6a-4948-b9c2-d477b75685c8 req-68505b38-fd48-4433-8354-0fce02290d6d service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received unexpected event network-vif-plugged-1cb8209b-2a23-499d-b852-91ad4d89784e for instance with vm_state building and task_state spawning. [ 2033.262442] env[62405]: DEBUG oslo_vmware.api [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Task: {'id': task-1948116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.860733} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.262442] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2033.262575] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2033.262799] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2033.263065] env[62405]: INFO nova.compute.manager [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Took 1.68 seconds to destroy the instance on the hypervisor. [ 2033.263229] env[62405]: DEBUG oslo.service.loopingcall [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.263461] env[62405]: DEBUG nova.compute.manager [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2033.263461] env[62405]: DEBUG nova.network.neutron [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2033.304610] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2033.507606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.507922] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.579139] env[62405]: DEBUG nova.network.neutron [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Successfully updated port: 1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2033.608370] env[62405]: DEBUG nova.compute.manager [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Received event network-vif-plugged-6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2033.608370] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] Acquiring lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.608370] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.608370] env[62405]: DEBUG oslo_concurrency.lockutils [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.608370] env[62405]: DEBUG nova.compute.manager [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] No waiting events found dispatching network-vif-plugged-6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2033.608370] env[62405]: WARNING nova.compute.manager [req-c3ecfb78-d0ca-42d0-bd39-4fbe453c9490 req-8eda34f2-c403-4336-9c9f-a70a1b84c6ca service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Received 
unexpected event network-vif-plugged-6c5dc5af-ff6d-4205-a204-1c594c3c805a for instance with vm_state building and task_state spawning. [ 2033.710834] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948117, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.730602] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Successfully updated port: 6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2033.760297] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Successfully created port: c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2034.013665] env[62405]: DEBUG nova.compute.utils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2034.079322] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.079322] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.079322] env[62405]: DEBUG nova.network.neutron [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2034.104630] env[62405]: DEBUG nova.compute.manager [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Received event network-changed-6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2034.104630] env[62405]: DEBUG nova.compute.manager [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Refreshing instance network info cache due to event network-changed-6c5dc5af-ff6d-4205-a204-1c594c3c805a. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2034.104630] env[62405]: DEBUG oslo_concurrency.lockutils [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] Acquiring lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.105061] env[62405]: DEBUG oslo_concurrency.lockutils [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] Acquired lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.105061] env[62405]: DEBUG nova.network.neutron [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Refreshing network info cache for port 6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2034.210952] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948117, 'name': ReconfigVM_Task, 'duration_secs': 0.950987} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.211317] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a/6fcfada3-d73a-4814-bf45-d34b26d76d4a.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2034.212769] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'guest_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'device_type': 'disk', 'size': 0, 'disk_bus': None, 'encryption_format': None, 'image_id': 'e6bba7a8-c2de-41dc-871a-3859bba5f4f9'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'delete_on_termination': False, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'disk_bus': None, 'attachment_id': '39a45a65-ce69-4c19-aa23-f1ef302926f3', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401573', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'name': 'volume-c64ac26e-4f56-4aad-931f-053141f488c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6fcfada3-d73a-4814-bf45-d34b26d76d4a', 'attached_at': '', 'detached_at': '', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'serial': 'c64ac26e-4f56-4aad-931f-053141f488c8'}, 'volume_type': None}], 'swap': None} {{(pid=62405) spawn 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2034.213030] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2034.213274] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401573', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'name': 'volume-c64ac26e-4f56-4aad-931f-053141f488c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6fcfada3-d73a-4814-bf45-d34b26d76d4a', 'attached_at': '', 'detached_at': '', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'serial': 'c64ac26e-4f56-4aad-931f-053141f488c8'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2034.214083] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85d4f71-a96f-47f8-9ca4-561604a8070a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.231318] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b91309-1929-4935-82fd-9d8b8243dea6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.234572] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.259503] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] volume-c64ac26e-4f56-4aad-931f-053141f488c8/volume-c64ac26e-4f56-4aad-931f-053141f488c8.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2034.259849] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74b2183a-444b-41ec-9370-7965ea1b8552 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.278954] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2034.278954] env[62405]: value = "task-1948118" [ 2034.278954] env[62405]: _type = "Task" [ 2034.278954] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.287530] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948118, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.314659] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2034.345747] env[62405]: DEBUG 
nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2034.345747] env[62405]: DEBUG nova.virt.hardware [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2034.346901] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381b2a51-a7ee-48fd-a991-2a81b1afbeb7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.356225] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698953dd-e3d2-4fcf-86f0-fe8bdad167a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.363938] env[62405]: DEBUG nova.network.neutron [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.380421] env[62405]: DEBUG nova.network.neutron [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.438071] env[62405]: DEBUG nova.network.neutron [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.520716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.616161] env[62405]: DEBUG nova.network.neutron [None 
req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2034.673755] env[62405]: DEBUG nova.network.neutron [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2034.792674] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948118, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.851125] env[62405]: DEBUG nova.network.neutron [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.866835] env[62405]: INFO nova.compute.manager [-] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Took 1.63 seconds to deallocate network for instance. [ 2034.882641] env[62405]: INFO nova.compute.manager [-] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Took 1.66 seconds to deallocate network for instance. [ 2034.911241] env[62405]: DEBUG nova.network.neutron [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.919207] env[62405]: DEBUG nova.network.neutron [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.933581] env[62405]: DEBUG 
nova.network.neutron [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2034.944902] env[62405]: INFO nova.compute.manager [-] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Took 1.68 seconds to deallocate network for instance. [ 2035.289797] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948118, 'name': ReconfigVM_Task, 'duration_secs': 0.659528} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.290120] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-c64ac26e-4f56-4aad-931f-053141f488c8/volume-c64ac26e-4f56-4aad-931f-053141f488c8.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2035.294876] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83eaa48d-3de7-4cae-a509-93969266f4b7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.313967] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2035.313967] env[62405]: value = "task-1948119" [ 2035.313967] env[62405]: _type = "Task" [ 2035.313967] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.323716] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948119, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.354955] env[62405]: DEBUG oslo_concurrency.lockutils [req-4098e68b-509b-4695-8500-49d9defc2d0d req-4c7e6f91-c1a9-4b03-a82c-5188e4d364c2 service nova] Releasing lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.355365] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquired lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.355539] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2035.365986] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Successfully updated port: c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2035.377984] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.378276] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.378497] env[62405]: DEBUG nova.objects.instance [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid 798257f7-0590-4f82-82b0-d428cc6e6e92 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2035.389577] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.415425] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2035.415752] env[62405]: DEBUG nova.compute.manager [None 
req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Instance network_info: |[{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2035.416228] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:15:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cb8209b-2a23-499d-b852-91ad4d89784e', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2035.423876] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating folder: Project (03a423f493034065bb1591d14d215ed8). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2035.424290] env[62405]: INFO nova.compute.manager [-] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Took 2.22 seconds to deallocate network for instance. [ 2035.424783] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4de4c158-e64a-41ce-8eac-c369f50dba89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.436557] env[62405]: INFO nova.compute.manager [-] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Took 2.24 seconds to deallocate network for instance. [ 2035.439491] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created folder: Project (03a423f493034065bb1591d14d215ed8) in parent group-v401284. 
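[editor's note] The "Acquiring lock" / "acquired ... waited" / "released ... held" lines around compute_resources and refresh_cache-<uuid> come from oslo.concurrency's locking helpers wrapping the critical sections in the resource tracker and the network-info cache refresh. A minimal sketch of both forms, assuming oslo.concurrency is available; the function bodies are placeholders, not Nova code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section: only one thread per process touches the shared
    # resource-tracker state at a time; the wrapper logs the
    # "acquired ... waited Ns" / "released ... held Ns" DEBUG lines.
    pass

def refresh_cache(instance_uuid):
    # Context-manager form, as used around the refresh_cache-<uuid> locks above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here (placeholder)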
[ 2035.439662] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating folder: Instances. Parent ref: group-v401581. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2035.443311] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fe3fd61-c114-4f90-bc57-22465f38c7be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.451375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.454597] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created folder: Instances in parent group-v401581. [ 2035.454703] env[62405]: DEBUG oslo.service.loopingcall [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2035.454797] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2035.454999] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6a2bfd1-2aa0-4c54-9541-e6f92950eefa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.482177] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2035.482177] env[62405]: value = "task-1948122" [ 2035.482177] env[62405]: _type = "Task" [ 2035.482177] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.491586] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948122, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.495160] env[62405]: DEBUG nova.compute.manager [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2035.495417] env[62405]: DEBUG nova.compute.manager [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing instance network info cache due to event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2035.495638] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.495785] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.495945] env[62405]: DEBUG nova.network.neutron [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2035.594053] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.594348] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.594583] env[62405]: INFO nova.compute.manager [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Attaching volume caee8648-7be2-4e64-811e-8bad831e1865 to /dev/sdb [ 2035.646101] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b2210f-6844-4020-b203-795fc9fe8c97 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.656392] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9c07cb-e831-46ab-b32b-ba5d77ca0c31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.672110] env[62405]: DEBUG nova.virt.block_device [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating existing volume attachment record: e0da0c87-e87c-48a0-8630-6d28901aa905 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2035.727223] env[62405]: DEBUG nova.network.neutron [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: 
c39d9059-8da4-4c8d-99ab-d66b8445e7da] Port 2026016a-87b1-42ae-a04f-d95c5fb37377 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2035.727531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.727710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.727837] env[62405]: DEBUG nova.network.neutron [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2035.823934] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948119, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.868532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.868692] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.868944] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2035.889358] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2035.933712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.946261] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.993244] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948122, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.055429] env[62405]: DEBUG nova.network.neutron [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Updating instance_info_cache with network_info: [{"id": "6c5dc5af-ff6d-4205-a204-1c594c3c805a", "address": "fa:16:3e:62:96:9e", "network": {"id": "d29e1845-c306-4556-a446-0390a9a3ebbf", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-584540890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb2a0fcacd6247ec811b95f61c752fb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c5dc5af-ff", "ovs_interfaceid": "6c5dc5af-ff6d-4205-a204-1c594c3c805a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.143754] env[62405]: DEBUG nova.compute.manager [req-60961a83-d2e6-4b16-94c0-27b9c8204218 req-a9e66dea-0b70-4065-9d3b-c11b0121fa3d service nova] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Received event network-vif-deleted-d440b728-2371-4e75-bb9f-2330f0318cae {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2036.143877] env[62405]: DEBUG nova.compute.manager [req-60961a83-d2e6-4b16-94c0-27b9c8204218 req-a9e66dea-0b70-4065-9d3b-c11b0121fa3d service nova] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Received event network-vif-deleted-995727bb-89db-40f7-a02b-916afa2c9641 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2036.144070] env[62405]: DEBUG nova.compute.manager [req-60961a83-d2e6-4b16-94c0-27b9c8204218 req-a9e66dea-0b70-4065-9d3b-c11b0121fa3d service 
nova] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Received event network-vif-deleted-fb939ba3-6c42-4855-80a4-e268dd0bbe54 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2036.158556] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8084054-5994-4de8-b756-9412cadae38c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.167460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526c66ec-2cdb-471c-aea0-22c1aeb00a8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.203607] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00215ec-0215-4209-9410-e34ce6a73189 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.211974] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279cb338-5682-4ac2-9f5d-bc73c0aede23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.225929] env[62405]: DEBUG nova.compute.provider_tree [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.267628] env[62405]: DEBUG nova.network.neutron [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updated VIF entry in instance network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2036.267990] env[62405]: DEBUG nova.network.neutron [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.324297] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948119, 'name': ReconfigVM_Task, 'duration_secs': 0.769352} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.324647] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401573', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'name': 'volume-c64ac26e-4f56-4aad-931f-053141f488c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6fcfada3-d73a-4814-bf45-d34b26d76d4a', 'attached_at': '', 'detached_at': '', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'serial': 'c64ac26e-4f56-4aad-931f-053141f488c8'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2036.325160] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50a09d9b-a5b5-4771-8c3d-a592b99affa8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.331529] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2036.331529] env[62405]: value = "task-1948126" [ 2036.331529] env[62405]: _type = "Task" [ 2036.331529] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.339250] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948126, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.404969] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2036.444096] env[62405]: DEBUG nova.network.neutron [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.494454] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948122, 'name': CreateVM_Task, 'duration_secs': 0.747617} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.494697] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2036.495394] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.495556] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.495867] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2036.496304] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41350b8e-46bb-4e1b-acb8-b7b92fc11ac3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.500684] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2036.500684] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5281a59c-6124-a810-d8db-c8adbbaced9a" [ 2036.500684] env[62405]: _type = "Task" [ 2036.500684] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.508625] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5281a59c-6124-a810-d8db-c8adbbaced9a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.560145] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Releasing lock "refresh_cache-2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.560337] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance network_info: |[{"id": "6c5dc5af-ff6d-4205-a204-1c594c3c805a", "address": "fa:16:3e:62:96:9e", "network": {"id": "d29e1845-c306-4556-a446-0390a9a3ebbf", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-584540890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb2a0fcacd6247ec811b95f61c752fb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c5dc5af-ff", "ovs_interfaceid": "6c5dc5af-ff6d-4205-a204-1c594c3c805a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2036.561406] env[62405]: DEBUG nova.network.neutron [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [{"id": "c38487c8-b41a-4c0c-8103-3392186dbdee", "address": "fa:16:3e:66:a1:15", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38487c8-b4", "ovs_interfaceid": "c38487c8-b41a-4c0c-8103-3392186dbdee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.562745] 
env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:96:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c5dc5af-ff6d-4205-a204-1c594c3c805a', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2036.570675] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Creating folder: Project (cb2a0fcacd6247ec811b95f61c752fb8). Parent ref: group-v401284. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2036.571183] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f508e0a7-a381-4315-981f-c9102550f210 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.582573] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Created folder: Project (cb2a0fcacd6247ec811b95f61c752fb8) in parent group-v401284. [ 2036.582761] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Creating folder: Instances. Parent ref: group-v401586. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2036.582987] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0a64af4-32ad-472b-90f9-0c20c4c3fc2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.592103] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Created folder: Instances in parent group-v401586. [ 2036.592335] env[62405]: DEBUG oslo.service.loopingcall [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.592515] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2036.592703] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d03d607-ac22-4a98-8290-0b0202743003 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.612299] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2036.612299] env[62405]: value = "task-1948129" [ 2036.612299] env[62405]: _type = "Task" [ 2036.612299] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.619293] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948129, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.730876] env[62405]: DEBUG nova.scheduler.client.report [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2036.771029] env[62405]: DEBUG oslo_concurrency.lockutils [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.771914] env[62405]: DEBUG nova.compute.manager [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Received event network-vif-deleted-ff462cd1-7fb0-4546-92d9-d317279b4c21 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2036.771914] env[62405]: DEBUG nova.compute.manager [req-1f8b3a8a-ce8a-462e-b404-4cd88a0f766a req-a6d57883-3717-4f3e-9634-3301b439a32f service nova] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Received event network-vif-deleted-546c09a6-a133-400e-b556-9b225a501a58 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2036.844027] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948126, 'name': Rename_Task, 'duration_secs': 0.193992} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.844313] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2036.844570] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-998a5d9a-588f-4590-a000-cae7e8d2a489 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.851268] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2036.851268] env[62405]: value = "task-1948130" [ 2036.851268] env[62405]: _type = "Task" [ 2036.851268] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.859219] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.947234] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.010485] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5281a59c-6124-a810-d8db-c8adbbaced9a, 'name': SearchDatastore_Task, 'duration_secs': 0.012611} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.010745] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.010985] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2037.011226] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.011375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.011553] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2037.011811] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2aa841c-ddce-42b0-802f-76a8049c6d4b {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.019668] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2037.019835] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2037.020515] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1577752a-cb83-4625-bf10-b271f2435687 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.025627] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2037.025627] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f14b7c-dfb1-b081-e653-5546df3404a5" [ 2037.025627] env[62405]: _type = "Task" [ 2037.025627] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.032634] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f14b7c-dfb1-b081-e653-5546df3404a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.072269] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.072585] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Instance network_info: |[{"id": "c38487c8-b41a-4c0c-8103-3392186dbdee", "address": "fa:16:3e:66:a1:15", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38487c8-b4", "ovs_interfaceid": "c38487c8-b41a-4c0c-8103-3392186dbdee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2037.072967] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:a1:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6815237d-f565-474d-a3c0-9c675478eb00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c38487c8-b41a-4c0c-8103-3392186dbdee', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2037.080285] env[62405]: DEBUG oslo.service.loopingcall [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2037.080500] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58691f22-5acd-45db-b587-df784a000813] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2037.080653] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28a2a6e5-0f90-4cc0-bf7b-6754b8f52f6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.100684] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2037.100684] env[62405]: value = "task-1948131" [ 2037.100684] env[62405]: _type = "Task" [ 2037.100684] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.109263] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948131, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.120098] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948129, 'name': CreateVM_Task, 'duration_secs': 0.358862} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.120204] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2037.120886] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.121069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.121380] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2037.121617] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dbb34df-65ab-4f31-93e6-9cb4a02fed2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.125803] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2037.125803] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524aa782-6c02-2ed0-a656-445b260eb7a5" [ 2037.125803] env[62405]: _type = "Task" [ 2037.125803] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.134237] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524aa782-6c02-2ed0-a656-445b260eb7a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.234363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.236668] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.847s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.236919] env[62405]: DEBUG nova.objects.instance [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lazy-loading 'resources' on Instance uuid 41e5385d-f0c7-4431-8424-e60dbeebaf8e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2037.255325] env[62405]: INFO nova.scheduler.client.report [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance 798257f7-0590-4f82-82b0-d428cc6e6e92 [ 2037.361450] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948130, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.450972] env[62405]: DEBUG nova.compute.manager [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62405) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2037.521426] env[62405]: DEBUG nova.compute.manager [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Received event network-vif-plugged-c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2037.521426] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Acquiring lock "58691f22-5acd-45db-b587-df784a000813-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.521583] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Lock "58691f22-5acd-45db-b587-df784a000813-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.521783] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Lock "58691f22-5acd-45db-b587-df784a000813-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.522039] env[62405]: DEBUG nova.compute.manager [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] No waiting events found dispatching network-vif-plugged-c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2037.522250] env[62405]: WARNING nova.compute.manager [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Received unexpected event network-vif-plugged-c38487c8-b41a-4c0c-8103-3392186dbdee for instance with vm_state building and task_state spawning. [ 2037.522419] env[62405]: DEBUG nova.compute.manager [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Received event network-changed-c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2037.522956] env[62405]: DEBUG nova.compute.manager [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Refreshing instance network info cache due to event network-changed-c38487c8-b41a-4c0c-8103-3392186dbdee. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2037.523218] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Acquiring lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.523376] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Acquired lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.523543] env[62405]: DEBUG nova.network.neutron [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Refreshing network info cache for port c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2037.538042] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f14b7c-dfb1-b081-e653-5546df3404a5, 'name': SearchDatastore_Task, 'duration_secs': 0.008934} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.539122] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ddb0c4c-984d-4c33-9417-d06b4827c427 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.544407] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2037.544407] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522076f1-8f60-ab62-e9c2-348cb1f02c76" [ 2037.544407] env[62405]: _type = "Task" [ 2037.544407] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.552890] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522076f1-8f60-ab62-e9c2-348cb1f02c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.609580] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948131, 'name': CreateVM_Task, 'duration_secs': 0.409273} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.609741] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58691f22-5acd-45db-b587-df784a000813] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2037.610387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.634630] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524aa782-6c02-2ed0-a656-445b260eb7a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009268} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.634850] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.635090] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2037.635295] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.635517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.635850] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2037.636093] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ea50be-93c9-4a44-a9dc-3f9da4df183c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.641726] env[62405]: DEBUG oslo_vmware.api 
[None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2037.641726] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c48497-a447-3a49-f726-a8891e79729b" [ 2037.641726] env[62405]: _type = "Task" [ 2037.641726] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.649175] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c48497-a447-3a49-f726-a8891e79729b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.764029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e00f8f99-b083-42f7-b59d-28f7e9432ddb tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "798257f7-0590-4f82-82b0-d428cc6e6e92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.916s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.861337] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948130, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.960688] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88d5781-9fcc-4015-b06a-fedd42950aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.968591] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60cbefa5-0ded-4f7d-9ff9-43b7790f12e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.997846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c997ee-50bf-4943-bbed-d5777ee9169d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.005074] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cd9515-2d6e-4c6e-bdab-8cc25f4084c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.019641] env[62405]: DEBUG nova.compute.provider_tree [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.055290] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522076f1-8f60-ab62-e9c2-348cb1f02c76, 'name': SearchDatastore_Task, 'duration_secs': 
0.010375} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.057380] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.057658] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 46b794f6-e858-45e6-9977-98ab246482f3/46b794f6-e858-45e6-9977-98ab246482f3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2038.058142] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.058343] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2038.058559] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c85ca1d5-f84a-473a-90b0-d9cb47e59a2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.062622] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2b9ff86-b83a-4fe9-8bd5-d4a78d2f3400 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.069784] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2038.069784] env[62405]: value = "task-1948133" [ 2038.069784] env[62405]: _type = "Task" [ 2038.069784] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.074028] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2038.074224] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2038.075294] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2732cf6-538e-41c2-9164-c688afae8800 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.081111] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.084357] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2038.084357] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ccc64-fc40-3f87-9027-bae814f55e64" [ 2038.084357] env[62405]: _type = "Task" [ 2038.084357] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.092494] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ccc64-fc40-3f87-9027-bae814f55e64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.153332] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c48497-a447-3a49-f726-a8891e79729b, 'name': SearchDatastore_Task, 'duration_secs': 0.00913} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.156290] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.156573] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2038.156813] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.254874] env[62405]: DEBUG nova.network.neutron [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Updated VIF entry in instance network info cache for port c38487c8-b41a-4c0c-8103-3392186dbdee. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2038.255398] env[62405]: DEBUG nova.network.neutron [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [{"id": "c38487c8-b41a-4c0c-8103-3392186dbdee", "address": "fa:16:3e:66:a1:15", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38487c8-b4", "ovs_interfaceid": "c38487c8-b41a-4c0c-8103-3392186dbdee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.364720] env[62405]: DEBUG oslo_vmware.api [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948130, 'name': PowerOnVM_Task, 'duration_secs': 1.337219} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.364720] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2038.401627] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.401679] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.488256] env[62405]: DEBUG nova.compute.manager [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2038.489138] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e714900a-11fc-411f-81fd-48d7a9b14164 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.523063] env[62405]: DEBUG nova.scheduler.client.report [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2038.559130] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.584142] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948133, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.595184] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522ccc64-fc40-3f87-9027-bae814f55e64, 'name': SearchDatastore_Task, 'duration_secs': 0.011069} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.595986] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51f1001c-c872-4f56-9461-b3030d8593b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.601479] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2038.601479] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b9104b-c57b-1710-d88c-a5b8101af00c" [ 2038.601479] env[62405]: _type = "Task" [ 2038.601479] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.609189] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b9104b-c57b-1710-d88c-a5b8101af00c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.758270] env[62405]: DEBUG oslo_concurrency.lockutils [req-be105186-36b2-4924-a35f-3c7df08e3a65 req-7c2838ef-3e1d-4986-970e-d43fbd5684fe service nova] Releasing lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.911432] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.911755] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 2039.006119] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1eab51b9-8013-4593-9bf4-b77d0951cb31 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 35.545s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.029040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.792s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.030834] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.580s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.031081] env[62405]: DEBUG nova.objects.instance [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lazy-loading 'resources' on Instance uuid 556e1bca-f2f1-4200-96df-997d48ce5a15 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2039.068783] env[62405]: INFO nova.scheduler.client.report [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted allocations for instance 41e5385d-f0c7-4431-8424-e60dbeebaf8e [ 2039.079921] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.080581] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.080903] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.081707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.081707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.083924] env[62405]: INFO nova.compute.manager [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 
tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Terminating instance [ 2039.091366] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521188} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.092371] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 46b794f6-e858-45e6-9977-98ab246482f3/46b794f6-e858-45e6-9977-98ab246482f3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2039.092371] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2039.092371] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2065e11-8e6a-4f33-b980-4761d8aa4478 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.101400] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2039.101400] env[62405]: value = "task-1948134" [ 2039.101400] env[62405]: _type = "Task" [ 2039.101400] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.114051] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b9104b-c57b-1710-d88c-a5b8101af00c, 'name': SearchDatastore_Task, 'duration_secs': 0.00873} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.116929] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.117259] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8/2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2039.117910] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.118177] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.118373] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2039.118597] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2e96f38-c3de-454a-8987-be5f8679ce19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.121705] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37f9457d-6518-429c-89da-4d182d00bb28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.129446] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2039.129446] env[62405]: value = "task-1948135" [ 2039.129446] env[62405]: _type = "Task" [ 2039.129446] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.130749] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2039.130924] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2039.134455] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d1c576f-f5b7-4c1b-a76c-9da004446a3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.141654] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.142559] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2039.142559] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52afae19-35da-fc8a-633c-536e72e05b54" [ 2039.142559] env[62405]: _type = "Task" [ 2039.142559] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.151471] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52afae19-35da-fc8a-633c-536e72e05b54, 'name': SearchDatastore_Task, 'duration_secs': 0.009223} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.152247] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb9098ee-958a-49fd-a311-70f5819c85fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.157251] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2039.157251] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b2d0f6-2bcb-44b3-f85f-ab8588e89018" [ 2039.157251] env[62405]: _type = "Task" [ 2039.157251] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.164849] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b2d0f6-2bcb-44b3-f85f-ab8588e89018, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.580247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3732fbaf-503d-4913-b5b9-17576fca5005 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "41e5385d-f0c7-4431-8424-e60dbeebaf8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.945s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.587264] env[62405]: DEBUG nova.compute.manager [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2039.587528] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2039.588417] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc9dfc6-b478-4af8-a654-b0358a398d9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.599342] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2039.600678] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6af3421-2dbd-4ff2-af82-76d099697358 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.612315] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2039.612315] env[62405]: value = "task-1948136" [ 2039.612315] env[62405]: _type = "Task" [ 2039.612315] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.618928] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154014} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.620173] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2039.620595] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a50590-f4aa-4478-b0ec-c9ba11d9666d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.630768] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.651996] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 46b794f6-e858-45e6-9977-98ab246482f3/46b794f6-e858-45e6-9977-98ab246482f3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2039.655810] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c222aea4-73c2-4bc2-808f-db7d60161654 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.673841] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948135, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.682074] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b2d0f6-2bcb-44b3-f85f-ab8588e89018, 'name': SearchDatastore_Task, 'duration_secs': 0.008948} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.683399] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.683656] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 58691f22-5acd-45db-b587-df784a000813/58691f22-5acd-45db-b587-df784a000813.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2039.683980] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2039.683980] env[62405]: value = "task-1948137" [ 2039.683980] env[62405]: _type = "Task" [ 2039.683980] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.686506] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee20e626-3678-472f-9ac9-b6ba918bf56f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.696845] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948137, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.699167] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2039.699167] env[62405]: value = "task-1948138" [ 2039.699167] env[62405]: _type = "Task" [ 2039.699167] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.711290] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948138, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.802989] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67c3412-98fc-4642-a851-b06e9613b80f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.810664] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c145b38d-a3ee-4c0d-aa47-96e6eb327010 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.840515] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662ba77b-2adf-4386-8c1d-bde3e899ccd6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.847950] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d2cd3a-e2e6-4ac6-8da2-f2929eb20436 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.862830] env[62405]: DEBUG nova.compute.provider_tree [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2039.973525] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.973803] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.974017] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.974426] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.974426] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 
tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.976943] env[62405]: INFO nova.compute.manager [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Terminating instance [ 2040.123456] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948136, 'name': PowerOffVM_Task, 'duration_secs': 0.19754} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.123773] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2040.123925] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2040.124202] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24eb3f2f-86de-46eb-94d5-9a5190840e39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.139754] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559256} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.140014] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8/2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2040.140350] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2040.140628] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd0eac43-873c-4efe-b167-aaec2abdcb5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.148502] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2040.148502] env[62405]: value = "task-1948140" [ 2040.148502] env[62405]: _type = "Task" [ 2040.148502] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.157734] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.201790] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.211184] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948138, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.213887] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2040.214110] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2040.214395] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleting the datastore file [datastore1] 4d59d9fd-23df-4933-97ed-32602e51e9aa {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.214589] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03c3646c-24db-40d2-8294-a52501c1cacf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.221886] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for the task: (returnval){ [ 2040.221886] env[62405]: value = "task-1948141" [ 2040.221886] env[62405]: _type = "Task" [ 2040.221886] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.226050] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2040.226292] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401585', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'name': 'volume-caee8648-7be2-4e64-811e-8bad831e1865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '81d9be97-9147-4754-80c2-68c1a389842e', 'attached_at': '', 'detached_at': '', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'serial': 'caee8648-7be2-4e64-811e-8bad831e1865'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2040.227104] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc48090-7966-4c99-8e15-97c31e1d978b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.245670] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948141, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.246044] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8717b9a8-9d9d-469a-ba65-8fe3b16504ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.273100] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-caee8648-7be2-4e64-811e-8bad831e1865/volume-caee8648-7be2-4e64-811e-8bad831e1865.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2040.273483] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd05ca52-4e83-45ff-8cc6-8fcba5133506 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.292488] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2040.292488] env[62405]: value = "task-1948142" [ 2040.292488] env[62405]: _type = "Task" [ 2040.292488] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.300291] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948142, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.366647] env[62405]: DEBUG nova.scheduler.client.report [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2040.483590] env[62405]: DEBUG nova.compute.manager [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2040.483801] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2040.484676] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdf2aeb-62e8-46c2-9722-1027251b4eee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.492743] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2040.492977] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d964b372-0d64-40c9-933b-c88f7c20193d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.498947] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2040.498947] env[62405]: value = "task-1948143" [ 2040.498947] env[62405]: _type = "Task" [ 2040.498947] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.506972] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.658531] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096143} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.658875] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2040.659602] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a074c660-3a16-475a-972f-c2c16a06b158 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.681731] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8/2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2040.682033] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e8659ed-0080-4dac-8480-33af47fd0ae2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.708414] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948137, 'name': ReconfigVM_Task, 'duration_secs': 0.646762} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.708698] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2040.708698] env[62405]: value = "task-1948144" [ 2040.708698] env[62405]: _type = "Task" [ 2040.708698] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.709332] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 46b794f6-e858-45e6-9977-98ab246482f3/46b794f6-e858-45e6-9977-98ab246482f3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2040.710078] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-635d7de8-b7ad-4b41-bdfa-115a6c13f345 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.716970] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524273} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.717874] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 58691f22-5acd-45db-b587-df784a000813/58691f22-5acd-45db-b587-df784a000813.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2040.718151] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2040.718402] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cdf64db-753a-47bf-8868-eb188741a546 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.723114] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948144, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.726951] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2040.726951] env[62405]: value = "task-1948145" [ 2040.726951] env[62405]: _type = "Task" [ 2040.726951] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.733823] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2040.733823] env[62405]: value = "task-1948146" [ 2040.733823] env[62405]: _type = "Task" [ 2040.733823] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.734050] env[62405]: DEBUG oslo_vmware.api [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Task: {'id': task-1948141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170628} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.734652] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2040.734866] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2040.735076] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2040.735246] env[62405]: INFO nova.compute.manager [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2040.735473] env[62405]: DEBUG oslo.service.loopingcall [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.738523] env[62405]: DEBUG nova.compute.manager [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2040.738626] env[62405]: DEBUG nova.network.neutron [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2040.743350] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948145, 'name': Rename_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.748038] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948146, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.805990] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948142, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.872360] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.875112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.941s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.875439] env[62405]: DEBUG nova.objects.instance [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'resources' on Instance uuid d186b2f4-3fd1-44be-b8a4-080972aff3a0 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2040.898540] env[62405]: INFO nova.scheduler.client.report [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Deleted allocations for instance 556e1bca-f2f1-4200-96df-997d48ce5a15 [ 2041.010382] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948143, 'name': PowerOffVM_Task, 'duration_secs': 0.384534} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.010670] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2041.010837] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2041.011204] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f6bda48-6a54-4059-9dd4-8d9fbaf754bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.058213] env[62405]: DEBUG nova.compute.manager [req-aa89060f-1e81-4fc9-801d-386958abad31 req-3e40f9cf-43ec-4350-9992-203abcbbc867 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Received event network-vif-deleted-d7c38983-3ca5-4934-af4a-1bf5f845ec9a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2041.058445] env[62405]: INFO nova.compute.manager [req-aa89060f-1e81-4fc9-801d-386958abad31 req-3e40f9cf-43ec-4350-9992-203abcbbc867 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Neutron deleted interface d7c38983-3ca5-4934-af4a-1bf5f845ec9a; detaching it from the instance and deleting it from the info cache [ 2041.058614] env[62405]: DEBUG nova.network.neutron [req-aa89060f-1e81-4fc9-801d-386958abad31 req-3e40f9cf-43ec-4350-9992-203abcbbc867 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.221743] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948144, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.222991] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2041.223233] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2041.223418] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleting the datastore file [datastore1] 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2041.223666] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6707a9a9-2b87-4dc8-ae65-076c20b26fe1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.232225] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for the task: (returnval){ [ 2041.232225] env[62405]: value = "task-1948148" [ 2041.232225] env[62405]: _type = "Task" [ 2041.232225] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.238374] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948145, 'name': Rename_Task, 'duration_secs': 0.225774} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.242186] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2041.242451] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-508e0f64-42fb-46d4-870e-ee418d201c1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.247277] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948148, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.251239] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082795} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.252406] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2041.252751] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2041.252751] env[62405]: value = "task-1948149" [ 2041.252751] env[62405]: _type = "Task" [ 2041.252751] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.253434] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af9803a-be4d-4aae-9f79-26b39609e0c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.263588] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.286087] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 58691f22-5acd-45db-b587-df784a000813/58691f22-5acd-45db-b587-df784a000813.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2041.286432] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c181f4e-2803-40a2-99f3-1224212e78c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.311485] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948142, 'name': ReconfigVM_Task, 'duration_secs': 0.52122} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.312847] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-caee8648-7be2-4e64-811e-8bad831e1865/volume-caee8648-7be2-4e64-811e-8bad831e1865.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.317662] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2041.317662] env[62405]: value = "task-1948150" [ 2041.317662] env[62405]: _type = "Task" [ 2041.317662] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.317899] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07a5b7d9-81a0-40d0-b913-4af0bc96662c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.338572] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.340040] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2041.340040] env[62405]: value = "task-1948151" [ 2041.340040] env[62405]: _type = "Task" [ 2041.340040] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.348099] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948151, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.407410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-20353284-96d8-458e-a74f-940b8b6e8389 tempest-AttachInterfacesTestJSON-723496939 tempest-AttachInterfacesTestJSON-723496939-project-member] Lock "556e1bca-f2f1-4200-96df-997d48ce5a15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.333s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.534603] env[62405]: DEBUG nova.network.neutron [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2041.562649] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2981eb80-4e88-424e-9395-5c619563ec59 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.574604] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce85dd6e-e28d-464b-97d5-2159b3abdafa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.611728] env[62405]: DEBUG nova.compute.manager [req-aa89060f-1e81-4fc9-801d-386958abad31 req-3e40f9cf-43ec-4350-9992-203abcbbc867 service nova] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Detach interface failed, port_id=d7c38983-3ca5-4934-af4a-1bf5f845ec9a, reason: Instance 4d59d9fd-23df-4933-97ed-32602e51e9aa could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2041.634647] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be48fda-6511-43f7-bb1c-bb2378b096d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.643499] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dec1be0-02f1-45f5-a800-636e4635f629 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.676314] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650db5f8-fa12-4d7e-a3df-42b03492af38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.683951] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7f3fcf-1fea-44b4-b0e5-61dd50d0ae49 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.697588] env[62405]: DEBUG nova.compute.provider_tree [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2041.719872] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948144, 'name': ReconfigVM_Task, 'duration_secs': 0.716745} completed 
successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.720171] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8/2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.720830] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3da22dd-ee61-4b89-a307-67934d69d6e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.727145] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2041.727145] env[62405]: value = "task-1948152" [ 2041.727145] env[62405]: _type = "Task" [ 2041.727145] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.736669] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948152, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.744376] env[62405]: DEBUG oslo_vmware.api [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Task: {'id': task-1948148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364043} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.744616] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2041.744830] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2041.744999] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2041.745231] env[62405]: INFO nova.compute.manager [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Took 1.26 seconds to destroy the instance on the hypervisor. 
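The repeated "Task: {'id': task-..., 'name': ...} progress is N%" entries above, followed eventually by "completed successfully", are the footprint of a poll-until-done loop on vCenter task objects. The sketch below illustrates that pattern only; it is not the oslo.vmware implementation, and fetch_task_info is a hypothetical stand-in for whatever call reads the task state from vCenter.

    import time

    def wait_for_task(task_id, fetch_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it finishes; return its result or raise."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'name': 'ReconfigVM_Task', 'progress': 14}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            # emits the same kind of breadcrumb seen in the log above
            print(f"Task {task_id} ({info.get('name')}) progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
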
[ 2041.745844] env[62405]: DEBUG oslo.service.loopingcall [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2041.745844] env[62405]: DEBUG nova.compute.manager [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2041.745844] env[62405]: DEBUG nova.network.neutron [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2041.765539] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.841672] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948150, 'name': ReconfigVM_Task, 'duration_secs': 0.310884} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.844950] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 58691f22-5acd-45db-b587-df784a000813/58691f22-5acd-45db-b587-df784a000813.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.845681] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a534be83-66bc-4cdf-9975-7005803ae8eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.852367] env[62405]: DEBUG oslo_vmware.api [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948151, 'name': ReconfigVM_Task, 'duration_secs': 0.148359} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.853641] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401585', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'name': 'volume-caee8648-7be2-4e64-811e-8bad831e1865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '81d9be97-9147-4754-80c2-68c1a389842e', 'attached_at': '', 'detached_at': '', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'serial': 'caee8648-7be2-4e64-811e-8bad831e1865'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2041.855261] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2041.855261] env[62405]: value = "task-1948153" [ 2041.855261] env[62405]: _type = "Task" [ 2041.855261] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.865168] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948153, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.948718] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.948902] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.949113] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2042.037560] env[62405]: INFO nova.compute.manager [-] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Took 1.30 seconds to deallocate network for instance. 
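The "_attach_volume_vmdk" and "Attached VMDK" entries above carry a connection_info dict describing the Cinder volume being wired into the VM. As a reading aid, the snippet below copies the relevant fields from that logged dict and shows how such a structure can be unpacked; the unpacking itself is only illustrative, not Nova code.

    # Fields copied from the connection_info dict logged above (abridged).
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-401585',
            'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865',
            'name': 'volume-caee8648-7be2-4e64-811e-8bad831e1865',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'caee8648-7be2-4e64-811e-8bad831e1865',
    }

    data = connection_info['data']
    read_only = data['access_mode'] != 'rw'  # would drive the disk mode used during the reconfigure
    # Matches the datastore path seen in the ReconfigVM_Task entries above.
    backing_vmdk = f"volume-{data['volume_id']}/volume-{data['volume_id']}.vmdk"
    print(data['volume_id'], read_only, backing_vmdk)
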
[ 2042.201478] env[62405]: DEBUG nova.scheduler.client.report [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2042.237341] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948152, 'name': Rename_Task, 'duration_secs': 0.16029} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.237607] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.237846] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0022699e-1a14-4a56-9a3f-41c6ec4b076e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.244398] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2042.244398] env[62405]: value = "task-1948154" [ 2042.244398] env[62405]: _type = "Task" [ 2042.244398] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.252053] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.265176] env[62405]: DEBUG oslo_vmware.api [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948149, 'name': PowerOnVM_Task, 'duration_secs': 0.886641} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.265442] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2042.265649] env[62405]: INFO nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Took 12.20 seconds to spawn the instance on the hypervisor. [ 2042.265833] env[62405]: DEBUG nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2042.266713] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05665084-baa1-4607-9019-c59d744bdd1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.367823] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948153, 'name': Rename_Task, 'duration_secs': 0.143777} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.367949] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.368214] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1cd97f8-f5aa-4cd8-9e69-c202eaaed4bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.373858] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2042.373858] env[62405]: value = "task-1948155" [ 2042.373858] env[62405]: _type = "Task" [ 2042.373858] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.381481] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.481222] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2042.522724] env[62405]: DEBUG nova.network.neutron [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.544959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.706330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.709415] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.763s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.709415] env[62405]: DEBUG nova.objects.instance [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'resources' on Instance uuid 14512ed2-9eae-4753-b83c-8c0d0d5d9432 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2042.731998] env[62405]: INFO nova.scheduler.client.report [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleted allocations for instance d186b2f4-3fd1-44be-b8a4-080972aff3a0 [ 2042.756246] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948154, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.785027] env[62405]: INFO nova.compute.manager [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Took 20.73 seconds to build instance. [ 2042.884248] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948155, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.901439] env[62405]: DEBUG nova.objects.instance [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'flavor' on Instance uuid 81d9be97-9147-4754-80c2-68c1a389842e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2043.026139] env[62405]: INFO nova.compute.manager [-] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Took 1.28 seconds to deallocate network for instance. [ 2043.070702] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.093355] env[62405]: DEBUG nova.compute.manager [req-47cb62e0-e775-431e-9119-d337813cb476 req-89c3f61e-b96d-49d7-90ee-2ba66220734f service nova] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Received event network-vif-deleted-04edbefd-e96c-47d6-bfd7-72fb2a759156 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2043.241131] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7e4ec1a4-19ed-4de5-9afa-99a0a36c8f8a tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "d186b2f4-3fd1-44be-b8a4-080972aff3a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.884s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.261611] env[62405]: DEBUG oslo_vmware.api [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948154, 'name': PowerOnVM_Task, 'duration_secs': 0.522552} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.265215] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.265654] env[62405]: INFO nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Took 14.55 seconds to spawn the instance on the hypervisor. 
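The 'Lock "compute_resources" acquired by ... :: waited 6.763s' and ':: held 1.831s' entries above come from oslo.concurrency's named-lock wrapper around the resource tracker. A minimal sketch follows, assuming the standard lockutils.synchronized decorator; the function body is a placeholder, not the resource tracker's actual logic.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Critical section: only one worker per process may update this
        # node's resource usage at a time; contention shows up in the log
        # as the "waited"/"held" timings quoted above.
        print(f"updating usage for {instance_uuid}")

    update_usage('d186b2f4-3fd1-44be-b8a4-080972aff3a0')
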
[ 2043.266080] env[62405]: DEBUG nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2043.268384] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e57c51-4e2b-4c6f-b423-8a3ed3625de2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.287868] env[62405]: DEBUG oslo_concurrency.lockutils [None req-73bbf45b-c89a-41c0-b339-346fbe430f0f tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.245s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.384313] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948155, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.406358] env[62405]: DEBUG oslo_concurrency.lockutils [None req-808d3590-cf6d-4efe-9fde-aeca2bdcd299 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.812s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.463555] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3692f057-7c8a-4604-a7ab-ab617d5e6185 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.472898] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd11b4b2-bdd0-433f-9d19-b5fbd78cc0b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.478395] env[62405]: DEBUG nova.compute.manager [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2043.478648] env[62405]: DEBUG nova.compute.manager [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing instance network info cache due to event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2043.478907] env[62405]: DEBUG oslo_concurrency.lockutils [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.479107] env[62405]: DEBUG oslo_concurrency.lockutils [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.479313] env[62405]: DEBUG nova.network.neutron [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2043.513580] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b644871a-0d71-4b37-b7ea-41b72bc0500f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.520934] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9837310d-d66d-4f23-aa79-305babc3fc66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.536155] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.537091] env[62405]: DEBUG nova.compute.provider_tree [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2043.573038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-d186b2f4-3fd1-44be-b8a4-080972aff3a0" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.573038] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 2043.573038] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573238] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573238] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573539] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573539] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573650] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.573797] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 2043.573872] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.794197] env[62405]: INFO nova.compute.manager [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Took 24.09 seconds to build instance. [ 2043.886828] env[62405]: DEBUG oslo_vmware.api [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948155, 'name': PowerOnVM_Task, 'duration_secs': 1.510292} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.887047] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.887367] env[62405]: INFO nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Took 9.57 seconds to spawn the instance on the hypervisor. 
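The run of "Running periodic task ComputeManager._poll_*" entries above, and the "CONF.reclaim_instance_interval <= 0, skipping..." line in particular, show the compute manager walking its registered periodic tasks and short-circuiting any task whose interval is configured off. The snippet below is a plain-Python stand-in for that skip guard only, not the oslo.service periodic-task machinery; FakeConf is a made-up placeholder for the real CONF object.

    class FakeConf:
        reclaim_instance_interval = 0  # <= 0 means queued-delete reclaiming is disabled

    CONF = FakeConf()

    def reclaim_queued_deletes():
        if CONF.reclaim_instance_interval <= 0:
            # Same early-out that produces the "skipping..." entry above.
            print("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # ...otherwise walk SOFT_DELETED instances older than the interval and reclaim them.

    reclaim_queued_deletes()
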
[ 2043.887456] env[62405]: DEBUG nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2043.888285] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d630f958-9999-44e8-9303-4f87f18de922 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.041247] env[62405]: DEBUG nova.scheduler.client.report [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2044.079782] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.098400] env[62405]: INFO nova.compute.manager [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Rescuing [ 2044.098695] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.098849] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.099022] env[62405]: DEBUG nova.network.neutron [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2044.296714] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3854056d-0e79-4895-a1f1-0aae0f899fc7 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.608s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.397361] env[62405]: DEBUG nova.network.neutron [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updated VIF entry in instance network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2044.397896] env[62405]: DEBUG nova.network.neutron [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2044.404862] env[62405]: INFO nova.compute.manager [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Took 16.60 seconds to build instance. 
[ 2044.471330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.471626] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.471888] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.472123] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.472337] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.475129] env[62405]: INFO nova.compute.manager [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Terminating instance [ 2044.551293] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.553085] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 5.993s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.580742] env[62405]: INFO nova.scheduler.client.report [None req-bc39d78b-ba71-49b9-a985-c34c79513351 
tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted allocations for instance 14512ed2-9eae-4753-b83c-8c0d0d5d9432 [ 2044.904257] env[62405]: DEBUG oslo_concurrency.lockutils [req-c917bc8a-dbb7-41f3-923b-c7e407e578e8 req-ec316c1e-ae5e-47e2-af59-f8a870305fa1 service nova] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2044.906897] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b3661b47-c9d9-45df-a066-f084e52eaf7e tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.113s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.978452] env[62405]: DEBUG nova.compute.manager [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2044.978688] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2044.979984] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897e7ece-5ef6-4ccc-a1f8-ddd520853cd4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.989229] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2044.989503] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d53daf0-1325-4978-afac-20e15fbb4422 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.995758] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2044.995758] env[62405]: value = "task-1948156" [ 2044.995758] env[62405]: _type = "Task" [ 2044.995758] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.003971] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948156, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.057244] env[62405]: DEBUG nova.objects.instance [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'migration_context' on Instance uuid c39d9059-8da4-4c8d-99ab-d66b8445e7da {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2045.059406] env[62405]: DEBUG nova.network.neutron [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.088293] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc39d78b-ba71-49b9-a985-c34c79513351 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "14512ed2-9eae-4753-b83c-8c0d0d5d9432" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.443s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.505708] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948156, 'name': PowerOffVM_Task, 'duration_secs': 0.340965} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.505991] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2045.506179] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2045.506424] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a222e2e-f894-41ce-ad73-e63534d4ecda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.563172] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.697666] env[62405]: INFO nova.compute.manager [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Rescuing [ 2045.697943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.698113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.698284] env[62405]: DEBUG nova.network.neutron [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2045.700997] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2045.701705] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2045.702303] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Deleting the datastore file [datastore1] 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2045.703220] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd236776-dc10-4bf6-afc6-0ffd662e0a41 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.710416] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for the task: (returnval){ [ 2045.710416] env[62405]: value = "task-1948158" [ 2045.710416] env[62405]: _type = "Task" [ 2045.710416] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.724302] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.781543] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4928b6ec-83be-473b-a20f-a52ab832b35f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.790230] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae77bb2-99d2-45dd-bc5d-89c9e2d83da0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.821450] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f463f324-0155-4468-a1f1-985dfa227344 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.829033] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d53b1b9-c527-489a-9e7a-0324bc0d5aef {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.842055] env[62405]: DEBUG nova.compute.provider_tree [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.224485] env[62405]: DEBUG oslo_vmware.api [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Task: {'id': task-1948158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190486} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.225218] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2046.225218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2046.225218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2046.225218] env[62405]: INFO nova.compute.manager [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2046.225901] env[62405]: DEBUG oslo.service.loopingcall [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2046.225901] env[62405]: DEBUG nova.compute.manager [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2046.225901] env[62405]: DEBUG nova.network.neutron [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2046.345373] env[62405]: DEBUG nova.scheduler.client.report [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2046.592619] env[62405]: DEBUG nova.network.neutron [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [{"id": "c38487c8-b41a-4c0c-8103-3392186dbdee", "address": "fa:16:3e:66:a1:15", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": 
"tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38487c8-b4", "ovs_interfaceid": "c38487c8-b41a-4c0c-8103-3392186dbdee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.727192] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.727821] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.745695] env[62405]: DEBUG nova.compute.manager [req-98d0dd62-3160-441d-afec-f01d98db5f93 req-1c5e0fcd-af84-439d-9036-ec88ed01707f service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Received event network-vif-deleted-6c5dc5af-ff6d-4205-a204-1c594c3c805a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2046.745909] env[62405]: INFO nova.compute.manager [req-98d0dd62-3160-441d-afec-f01d98db5f93 req-1c5e0fcd-af84-439d-9036-ec88ed01707f service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Neutron deleted interface 6c5dc5af-ff6d-4205-a204-1c594c3c805a; detaching it from the instance and deleting it from the info cache [ 2046.746109] env[62405]: DEBUG nova.network.neutron [req-98d0dd62-3160-441d-afec-f01d98db5f93 req-1c5e0fcd-af84-439d-9036-ec88ed01707f service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.996863] env[62405]: DEBUG nova.network.neutron [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.096415] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock 
"refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.098819] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2047.099186] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba24a21d-71cd-4a96-a8c2-c719725fb278 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.106414] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2047.106414] env[62405]: value = "task-1948159" [ 2047.106414] env[62405]: _type = "Task" [ 2047.106414] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.114394] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.230250] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2047.248693] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42a6a330-6fb4-40c3-b63d-418c29078bbc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.260669] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715ac818-a8b8-4b9a-b991-a2c1a54c75c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.301386] env[62405]: DEBUG nova.compute.manager [req-98d0dd62-3160-441d-afec-f01d98db5f93 req-1c5e0fcd-af84-439d-9036-ec88ed01707f service nova] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Detach interface failed, port_id=6c5dc5af-ff6d-4205-a204-1c594c3c805a, reason: Instance 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2047.357383] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.805s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.365788] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.821s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.366122] env[62405]: DEBUG nova.objects.instance [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lazy-loading 'resources' on Instance uuid 4d59d9fd-23df-4933-97ed-32602e51e9aa {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2047.374107] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.374370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.499130] env[62405]: INFO nova.compute.manager [-] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Took 1.27 seconds to deallocate network for instance. [ 2047.615833] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948159, 'name': PowerOffVM_Task, 'duration_secs': 0.302911} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.616058] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2047.616826] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c06c17-89dd-4994-afe5-17213ac0c104 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.639181] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5084b3ea-5681-4e0c-ab1d-b44cac770e32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.665616] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2047.666165] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39bd78fb-da4f-498e-a7d3-5f0040dca174 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.674737] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2047.674737] env[62405]: value = "task-1948160" [ 2047.674737] env[62405]: _type = "Task" [ 2047.674737] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.683422] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2047.683630] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2047.683867] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2047.684033] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2047.684218] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2047.684441] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dd6a320-fdaa-440c-989d-77d48a92792d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.693921] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2047.694119] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2047.694808] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aad93e91-badf-4829-8613-4aa04fa592ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.699593] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2047.699593] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52299166-d24d-1edd-2066-edf1f396ae74" [ 2047.699593] env[62405]: _type = "Task" [ 2047.699593] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.706807] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52299166-d24d-1edd-2066-edf1f396ae74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.754506] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.878069] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2048.005984] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.070659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbec41d-f845-4b0d-8c44-9149a5f1ee8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.078183] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f3aee2-fef8-4dcd-b7fc-ceea08555f05 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.107695] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a014b6-260d-48d6-90c1-3253b57f3a5b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.115460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd956575-548b-4c6f-aac4-1324578b99f9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.129285] env[62405]: DEBUG nova.compute.provider_tree [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2048.210294] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52299166-d24d-1edd-2066-edf1f396ae74, 'name': SearchDatastore_Task, 'duration_secs': 0.008969} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.211156] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3777c0a-17ce-4eae-a9c1-7da318412833 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.216873] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2048.216873] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529d0f15-9b45-8777-2bda-e9afbf0ac200" [ 2048.216873] env[62405]: _type = "Task" [ 2048.216873] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.225479] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529d0f15-9b45-8777-2bda-e9afbf0ac200, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.401307] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.632264] env[62405]: DEBUG nova.scheduler.client.report [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2048.646714] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2048.647175] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66267213-b400-46a6-a2e3-f9d68e2d8f4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.659945] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2048.659945] env[62405]: value = "task-1948161" [ 2048.659945] env[62405]: _type = "Task" [ 2048.659945] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.670989] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.729580] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529d0f15-9b45-8777-2bda-e9afbf0ac200, 'name': SearchDatastore_Task, 'duration_secs': 0.046902} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.729919] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.730349] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2048.730814] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-308b1edc-c4a3-46f9-b303-df7b91f912ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.740063] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2048.740063] env[62405]: value = "task-1948162" [ 2048.740063] env[62405]: _type = "Task" [ 2048.740063] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.752222] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948162, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.909256] env[62405]: INFO nova.compute.manager [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Swapping old allocation on dict_keys(['7d5eded7-a501-4fa6-b1d3-60e273d555d7']) held by migration 49367a81-108f-4418-8e83-5976f32abae1 for instance [ 2048.939311] env[62405]: DEBUG nova.scheduler.client.report [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Overwriting current allocation {'allocations': {'7d5eded7-a501-4fa6-b1d3-60e273d555d7': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 173}}, 'project_id': 'f3b50cc219314108945bfc8b2c21849a', 'user_id': '4ad5e220132245168b59ff3df599b974', 'consumer_generation': 1} on consumer c39d9059-8da4-4c8d-99ab-d66b8445e7da {{(pid=62405) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2049.042397] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2049.042622] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2049.042813] env[62405]: DEBUG nova.network.neutron [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2049.141136] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.146222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.609s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.146495] env[62405]: DEBUG nova.objects.instance [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lazy-loading 'resources' on Instance uuid 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2049.160165] env[62405]: INFO nova.scheduler.client.report [None 
req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Deleted allocations for instance 4d59d9fd-23df-4933-97ed-32602e51e9aa [ 2049.174960] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948161, 'name': PowerOffVM_Task, 'duration_secs': 0.184878} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.175282] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2049.176113] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e5ab9e-bcac-45b4-8906-5939928a0f01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.201823] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baefa791-2166-48da-acb4-d8cf2a197a47 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.242464] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2049.242871] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edbab52f-786e-40e4-86fd-02d6c79da1cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.254019] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2049.254019] env[62405]: value = "task-1948163" [ 2049.254019] env[62405]: _type = "Task" [ 2049.254019] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.258089] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948162, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.269942] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2049.270225] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2049.270450] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2049.270613] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2049.270779] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2049.271064] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df2662e8-638f-4749-8f25-cbc8e7bc18df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.286968] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2049.287205] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2049.287995] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9bfee95-7952-402f-9576-671d5a0153f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.297379] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2049.297379] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adada6-72bb-303c-9679-28a10fb5d2f7" [ 2049.297379] env[62405]: _type = "Task" [ 2049.297379] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.306384] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adada6-72bb-303c-9679-28a10fb5d2f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.674166] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a72b875d-2a07-447e-8f34-0e5f0d352323 tempest-ServersTestJSON-1575803087 tempest-ServersTestJSON-1575803087-project-member] Lock "4d59d9fd-23df-4933-97ed-32602e51e9aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.594s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.753381] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708659} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.756332] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. 
[ 2049.757380] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566c2c7c-0ad5-4e1d-9ab1-757373d4ca4a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.787451] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2049.792490] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0fd3655-a9ab-4f55-8494-b3ee3deadcb5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.818613] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adada6-72bb-303c-9679-28a10fb5d2f7, 'name': SearchDatastore_Task, 'duration_secs': 0.082506} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.823416] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2049.823416] env[62405]: value = "task-1948164" [ 2049.823416] env[62405]: _type = "Task" [ 2049.823416] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.823900] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d1c9e9a-3a2f-4d56-8f03-3c688e61e405 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.835239] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948164, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.839145] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2049.839145] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525b832c-8b44-028c-79d5-3fd6d6bef62f" [ 2049.839145] env[62405]: _type = "Task" [ 2049.839145] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.852674] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525b832c-8b44-028c-79d5-3fd6d6bef62f, 'name': SearchDatastore_Task, 'duration_secs': 0.010499} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.853156] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2049.853294] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 58691f22-5acd-45db-b587-df784a000813/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. {{(pid=62405) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2049.853567] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8611e3e6-6aa3-403d-acd5-77ba9f9749db {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.861754] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2049.861754] env[62405]: value = "task-1948165" [ 2049.861754] env[62405]: _type = "Task" [ 2049.861754] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.874700] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948165, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.910868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67a1cbb-c007-4b0b-b2d5-1f48e2fb70b3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.919855] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52513f97-582e-4105-baae-da1a0481f194 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.952571] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7142c881-6277-4bbc-af01-badfe860865f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.962316] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750ff7b1-2dab-4e52-87cd-7bac7c92a0ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.757554] env[62405]: DEBUG nova.compute.provider_tree [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2050.762021] env[62405]: DEBUG nova.network.neutron [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [{"id": "2026016a-87b1-42ae-a04f-d95c5fb37377", "address": "fa:16:3e:bc:e8:85", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2026016a-87", "ovs_interfaceid": "2026016a-87b1-42ae-a04f-d95c5fb37377", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2050.770675] env[62405]: DEBUG nova.scheduler.client.report [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2050.777025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.777025] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.780746] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948164, 'name': ReconfigVM_Task, 'duration_secs': 0.493601} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.783849] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2050.784458] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631154} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.785134] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c76f7e-e2b2-45f9-b90f-3de93fe653b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.787498] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 58691f22-5acd-45db-b587-df784a000813/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk. 
[ 2050.788857] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ea93e1-feb2-43f3-867f-635bca0a2b3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.812795] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 58691f22-5acd-45db-b587-df784a000813/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2050.830445] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d297928d-3ca4-43fe-bd99-0df89b38758d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.847323] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-836ae3c3-126c-4a9c-99a4-c378f4b8142d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.862278] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2050.862278] env[62405]: value = "task-1948167" [ 2050.862278] env[62405]: _type = "Task" [ 2050.862278] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.864033] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2050.864033] env[62405]: value = "task-1948166" [ 2050.864033] env[62405]: _type = "Task" [ 2050.864033] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.874861] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948167, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.877882] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948166, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.270884] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-c39d9059-8da4-4c8d-99ab-d66b8445e7da" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2051.271947] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed48ce13-099d-4e45-942e-067b0663974c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.276517] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.278496] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2051.285018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.203s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.285018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.285018] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2051.285018] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.529s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.285018] env[62405]: INFO nova.compute.claims [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2051.288185] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d960e69-6d48-4eb5-8a9c-7fbbe65165e2 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.291243] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0862d719-f8a4-4053-a506-7f8d4329a2c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.301309] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae16821-3d41-43ce-b75d-01e63c29fbee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.322184] env[62405]: INFO nova.scheduler.client.report [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Deleted allocations for instance 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d [ 2051.323730] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89d381c-a14f-46ab-a135-53d812c52f1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.335201] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c8b7c6-6410-4b6d-abe0-375989041fb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.368505] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179121MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2051.368786] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.382659] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948166, 'name': ReconfigVM_Task, 'duration_secs': 0.287667} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.385767] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 58691f22-5acd-45db-b587-df784a000813/e6bba7a8-c2de-41dc-871a-3859bba5f4f9-rescue.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2051.385993] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948167, 'name': ReconfigVM_Task, 'duration_secs': 0.162465} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.387257] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95e76f5-2ce8-4909-9110-855e865e2d93 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.390732] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.390732] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-daeb3611-ca2b-464c-bfbd-5453e814ca48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.416241] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7346165b-9486-4dff-bb19-519fdacee0f1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.426255] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2051.426255] env[62405]: value = "task-1948168" [ 2051.426255] env[62405]: _type = "Task" [ 2051.426255] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.431889] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2051.431889] env[62405]: value = "task-1948169" [ 2051.431889] env[62405]: _type = "Task" [ 2051.431889] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.435286] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948168, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.445295] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948169, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.819268] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.833447] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cbdd435f-fbb9-4c9a-a640-a07f5ec6da82 tempest-ServersAdminTestJSON-1517572684 tempest-ServersAdminTestJSON-1517572684-project-member] Lock "78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.860s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.937577] env[62405]: DEBUG oslo_vmware.api [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948168, 'name': PowerOnVM_Task, 'duration_secs': 0.436705} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.940869] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2051.944388] env[62405]: DEBUG nova.compute.manager [None req-2c7ecfdd-a9af-426e-9454-b86ca7fc01a1 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2051.946063] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43599212-d301-42bf-9821-e21a8130ee76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.952127] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948169, 'name': ReconfigVM_Task, 'duration_secs': 0.201206} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.952753] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.952998] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8916e52-6417-400e-9245-7c54527584d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.962813] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2051.962813] env[62405]: value = "task-1948170" [ 2051.962813] env[62405]: _type = "Task" [ 2051.962813] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.971371] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.409158] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2052.409656] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14d290f-1495-4035-9fa7-d70377cf3959 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.417266] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2052.417266] env[62405]: value = "task-1948171" [ 2052.417266] env[62405]: _type = "Task" [ 2052.417266] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.436550] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.475377] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948170, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.519621] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad7637b-1f89-4d56-8ff9-8e35f0c47abd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.529565] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d945b249-d11a-4d09-91e1-2fb84590ed66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.561847] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c1ef9f-468e-45bd-8812-960584608cba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.571153] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d1086b-013c-497b-9cf4-2d2a7c1f5467 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.585975] env[62405]: DEBUG nova.compute.provider_tree [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2052.927099] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948171, 'name': PowerOffVM_Task, 'duration_secs': 0.347447} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.927487] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2052.928223] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2052.928337] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2052.928497] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2052.928683] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2052.928834] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2052.928970] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2052.929271] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2052.929345] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2052.930473] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2052.930473] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2052.930473] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2052.934802] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03b69163-11b8-4908-9b96-917c9abf82c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.950544] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2052.950544] env[62405]: value = "task-1948172" [ 2052.950544] env[62405]: _type = "Task" [ 2052.950544] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.959904] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948172, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.972437] env[62405]: DEBUG oslo_vmware.api [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948170, 'name': PowerOnVM_Task, 'duration_secs': 0.733873} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.972809] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2052.976052] env[62405]: DEBUG nova.compute.manager [None req-bc01e767-50cc-489b-9c16-7498d4b15bf9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2052.976877] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030bd0b6-7f41-45e4-8325-4245b68e4aad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.089735] env[62405]: DEBUG nova.scheduler.client.report [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2053.221024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2053.221024] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.462102] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948172, 'name': ReconfigVM_Task, 'duration_secs': 0.165248} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.463408] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501491e0-78ba-49e9-90c7-fb21b92ca3ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.487049] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2053.487049] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2053.487049] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2053.487342] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2053.487342] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2053.487434] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2053.487655] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2053.487828] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2053.487996] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2053.488197] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2053.488399] env[62405]: DEBUG nova.virt.hardware [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2053.491301] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d773c701-64f6-429a-9592-9c34be64482e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.498272] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2053.498272] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52816814-c00f-27d1-b08a-f98345463a39" [ 2053.498272] env[62405]: _type = "Task" [ 2053.498272] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.506197] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52816814-c00f-27d1-b08a-f98345463a39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.595914] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.596461] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2053.599176] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.593s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2053.599400] env[62405]: DEBUG nova.objects.instance [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lazy-loading 'resources' on Instance uuid 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2053.682389] env[62405]: INFO nova.compute.manager [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Unrescuing [ 2053.682670] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.683086] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.683086] env[62405]: DEBUG nova.network.neutron [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2053.726018] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2053.957861] env[62405]: INFO nova.compute.manager [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Unrescuing [ 2053.958242] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.958309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquired lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.958469] env[62405]: DEBUG nova.network.neutron [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2054.007730] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52816814-c00f-27d1-b08a-f98345463a39, 'name': SearchDatastore_Task, 'duration_secs': 0.008134} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.013047] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2054.013342] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5fb9035-26f1-4b2e-854f-97e794364ca1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.033218] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2054.033218] env[62405]: value = "task-1948173" [ 2054.033218] env[62405]: _type = "Task" [ 2054.033218] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.041407] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948173, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.104107] env[62405]: DEBUG nova.compute.utils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2054.108298] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2054.108298] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2054.158412] env[62405]: DEBUG nova.policy [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f003aed5f864a8f933767606ae1f317', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '633b4e729a054bc69593b789af9ee070', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2054.254451] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.320076] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd3f1c7-f923-4646-abb3-c3ed13a62a4f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.327443] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1262e26-6ad6-4539-9c21-c0d82b9a4de3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.357704] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2a4f17-ec9f-416b-ab76-0cced99ea5df {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.367495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422ff9ee-7717-4a05-a666-523e66ad2564 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.381766] env[62405]: DEBUG nova.compute.provider_tree [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 
tempest-ServerGroupTestJSON-494616593-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2054.544645] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948173, 'name': ReconfigVM_Task, 'duration_secs': 0.201854} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.544645] env[62405]: DEBUG nova.network.neutron [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.545897] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2054.546699] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7f2f69-5eb8-4190-8ea7-9a80a260ee97 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.579451] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2054.580449] env[62405]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb8cf2db-450d-4f3d-99cb-eca23c2b4a14 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.606789] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2054.606789] env[62405]: value = "task-1948174" [ 2054.606789] env[62405]: _type = "Task" [ 2054.606789] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.610172] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2054.620628] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948174, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.669842] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Successfully created port: 3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2054.884585] env[62405]: DEBUG nova.scheduler.client.report [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2055.050654] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.052090] env[62405]: DEBUG nova.objects.instance [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'flavor' on Instance uuid 81d9be97-9147-4754-80c2-68c1a389842e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2055.080210] env[62405]: DEBUG nova.network.neutron [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 
58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [{"id": "c38487c8-b41a-4c0c-8103-3392186dbdee", "address": "fa:16:3e:66:a1:15", "network": {"id": "006b4fbf-fefb-47b8-b2e9-30e8308e87b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-369344299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4a78c04608454ac88ecb97b4c87a9d17", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38487c8-b4", "ovs_interfaceid": "c38487c8-b41a-4c0c-8103-3392186dbdee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.125446] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948174, 'name': ReconfigVM_Task, 'duration_secs': 0.322888} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.125814] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to attach disk [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da/c39d9059-8da4-4c8d-99ab-d66b8445e7da.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2055.126990] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c9556a-19bc-42ff-90be-33af17581d65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.159368] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf9a52c-2339-44f5-89b2-639a7462c3d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.181053] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8ea23f-10c6-4f24-97bf-67faf8657283 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.203223] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f1d0b-4711-4090-9929-9e4a7530c358 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.211698] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] 
[instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2055.211917] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12971241-350b-4821-8387-843459eaa97a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.219083] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2055.219083] env[62405]: value = "task-1948175" [ 2055.219083] env[62405]: _type = "Task" [ 2055.219083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.228158] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.390651] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.393127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.992s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.395662] env[62405]: INFO nova.compute.claims [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2055.419837] env[62405]: INFO nova.scheduler.client.report [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Deleted allocations for instance 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8 [ 2055.557502] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2451c76-f2f5-490d-a6e3-56395ee64f7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.581487] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2055.581697] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71e5d17b-5ae4-48a1-9cb2-443f885af189 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.583668] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Releasing lock "refresh_cache-58691f22-5acd-45db-b587-df784a000813" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.584415] env[62405]: DEBUG nova.objects.instance [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lazy-loading 'flavor' on Instance uuid 58691f22-5acd-45db-b587-df784a000813 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2055.591302] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2055.591302] env[62405]: value = "task-1948176" [ 2055.591302] env[62405]: _type = "Task" [ 2055.591302] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.599333] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.622141] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2055.658625] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2055.659016] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2055.659238] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2055.659479] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2055.659778] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2055.659997] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2055.660276] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2055.660489] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2055.660735] env[62405]: DEBUG nova.virt.hardware [None 
req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2055.660971] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2055.661275] env[62405]: DEBUG nova.virt.hardware [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2055.662387] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33f2817-7b02-40c4-882e-3fb664a57123 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.671122] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898da06b-e29d-4b6f-a555-86d912713028 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.728417] env[62405]: DEBUG oslo_vmware.api [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948175, 'name': PowerOnVM_Task, 'duration_secs': 0.437083} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.728715] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2055.934041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-113f41d3-775b-47ff-8f3f-a736dbd5ade1 tempest-ServerGroupTestJSON-494616593 tempest-ServerGroupTestJSON-494616593-project-member] Lock "2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.462s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.090227] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2426afd6-b789-43a5-898f-bbf0b48f2496 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.102634] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948176, 'name': PowerOffVM_Task, 'duration_secs': 0.449978} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.119262] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2056.125603] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2056.125993] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2056.126320] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12337bc7-5e73-4bad-9bbd-e42ee3d82618 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.139185] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a533151b-3800-40cf-8b16-924dc40a8ebd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.145855] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2056.145855] env[62405]: value = "task-1948177" [ 2056.145855] env[62405]: _type = "Task" [ 2056.145855] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.147085] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2056.147085] env[62405]: value = "task-1948178" [ 2056.147085] env[62405]: _type = "Task" [ 2056.147085] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.160750] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948177, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.491053] env[62405]: DEBUG nova.compute.manager [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Received event network-vif-plugged-3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2056.491279] env[62405]: DEBUG oslo_concurrency.lockutils [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2056.491491] env[62405]: DEBUG oslo_concurrency.lockutils [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.491681] env[62405]: DEBUG oslo_concurrency.lockutils [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.491814] env[62405]: DEBUG nova.compute.manager [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] No waiting events found dispatching network-vif-plugged-3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2056.491976] env[62405]: WARNING nova.compute.manager [req-55cc8c2b-e6c3-43bc-bf44-c780a58a14ae req-8706c5bb-3dac-4c98-8c7c-cd1667bfa6bf service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Received unexpected event network-vif-plugged-3d9e960f-b38a-4714-93c0-7ff8857554fe for instance with vm_state building and task_state spawning. 
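The SearchDatastore_Task / ReconfigVM_Task / PowerOnVM_Task entries traced above all come from the oslo.vmware session layer: a SOAP method that returns a Task object is invoked, then polled (the repeated "progress is N%" lines) until it reaches a terminal state. A minimal sketch of that call pattern, assuming the public oslo.vmware session API (invoke_api / wait_for_task); the host, credentials, and VM reference below are placeholders, not values taken from this log:

# Illustrative only: placeholder vCenter endpoint and credentials; vm_ref would
# normally come from a PropertyCollector lookup, not be hard-coded.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',   # vCenter host (placeholder)
    'svc-nova',          # server_username (placeholder)
    's3cret',            # server_password (placeholder)
    3,                   # api_retry_count
    0.5)                 # task_poll_interval: seconds between poll cycles

def power_on(vm_ref):
    # Invoking VirtualMachine.PowerOnVM_Task returns a Task managed object;
    # wait_for_task() polls it until it completes and raises on task error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

This roughly mirrors what the vmwareapi driver does through vm_util.power_on_instance(); the "completed successfully" lines in the log are emitted by the same polling loop once the task reaches its final state.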
[ 2056.595261] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cba7d1c-4c06-4d5d-875d-bb7c154d2b69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.603055] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e26c78-8398-4cb1-b67d-f0545250b88d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.607082] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Successfully updated port: 3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2056.636390] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8165511c-4eea-41ac-b3d2-b3bde11777ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.644542] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81102717-22f9-499a-8633-23a57f41e484 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.663397] env[62405]: DEBUG nova.compute.provider_tree [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2056.667671] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948177, 'name': PowerOffVM_Task, 'duration_secs': 0.190724} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.668134] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2056.673619] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2056.676849] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2613d3a-aa8d-4876-8126-2f1cb5c43444 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.689140] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948178, 'name': ReconfigVM_Task, 'duration_secs': 0.319428} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.689671] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2056.689868] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2056.690436] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85b0e38b-2930-46e5-8ca0-fbd4c911be10 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.695444] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2056.695444] env[62405]: value = "task-1948179" [ 2056.695444] env[62405]: _type = "Task" [ 2056.695444] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.699419] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2056.699419] env[62405]: value = "task-1948180" [ 2056.699419] env[62405]: _type = "Task" [ 2056.699419] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.705494] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948179, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.710186] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948180, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.774085] env[62405]: INFO nova.compute.manager [None req-6c6ca6f9-5da5-4615-889d-d4a7d9e344fa tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance to original state: 'active' [ 2057.111939] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.112315] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.112315] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2057.169832] env[62405]: DEBUG nova.scheduler.client.report [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2057.206767] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.211805] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948180, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.675510] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.676088] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2057.682568] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.311s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.682568] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2057.708353] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948179, 'name': ReconfigVM_Task, 'duration_secs': 0.838552} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.709289] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2057.709289] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2057.711093] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6e79b42-cfe7-44cd-92e3-f5dbc6341fb6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.714426] env[62405]: DEBUG oslo_vmware.api [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948180, 'name': PowerOnVM_Task, 'duration_secs': 0.995914} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.715080] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2057.715315] env[62405]: DEBUG nova.compute.manager [None req-2642aada-aefe-4b8e-877a-58c44daf7966 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2057.718387] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d39d13-90dc-42f7-bf99-23489959b32f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.723720] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2057.723720] env[62405]: value = "task-1948181" [ 2057.723720] env[62405]: _type = "Task" [ 2057.723720] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.738625] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948181, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.987713] env[62405]: DEBUG nova.network.neutron [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating instance_info_cache with network_info: [{"id": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "address": "fa:16:3e:4e:fa:e7", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d9e960f-b3", "ovs_interfaceid": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.181817] env[62405]: DEBUG nova.compute.utils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2058.183206] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2058.183364] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2058.235460] env[62405]: DEBUG oslo_vmware.api [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948181, 'name': PowerOnVM_Task, 'duration_secs': 0.431297} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.235738] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2058.235975] env[62405]: DEBUG nova.compute.manager [None req-4d020111-e2d9-4a53-b200-f6eae07efaf3 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2058.238598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdff5931-9ee7-4dc6-90e5-3ac37dd198af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.278998] env[62405]: DEBUG nova.policy [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96a739701a824313b30b0d214f43757b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6014bab6bc9a4b059bab88e44b31f446', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2058.490694] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.491158] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Instance network_info: |[{"id": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "address": "fa:16:3e:4e:fa:e7", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d9e960f-b3", "ovs_interfaceid": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2058.491604] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:fa:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d9e960f-b38a-4714-93c0-7ff8857554fe', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2058.505500] env[62405]: DEBUG oslo.service.loopingcall [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2058.505500] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2058.505500] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e923f7c3-273d-4dce-b35b-47d503fa2d5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.527980] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2058.527980] env[62405]: value = "task-1948182" [ 2058.527980] env[62405]: _type = "Task" [ 2058.527980] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.538457] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948182, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.546054] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.546315] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.546531] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.546713] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.546885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.549214] env[62405]: INFO nova.compute.manager [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Terminating instance [ 2058.560526] env[62405]: DEBUG nova.compute.manager [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Received event network-changed-3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2058.560733] env[62405]: DEBUG nova.compute.manager [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Refreshing instance network info cache due to event network-changed-3d9e960f-b38a-4714-93c0-7ff8857554fe. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2058.560946] env[62405]: DEBUG oslo_concurrency.lockutils [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] Acquiring lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.561104] env[62405]: DEBUG oslo_concurrency.lockutils [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] Acquired lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.561273] env[62405]: DEBUG nova.network.neutron [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Refreshing network info cache for port 3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2058.687072] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2058.721337] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.721494] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance a91a6d04-2ec0-4568-bdb3-732d148644de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.721619] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 0d2b305d-d754-413c-afdf-3a2e8f143891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.721742] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 81d9be97-9147-4754-80c2-68c1a389842e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722018] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b2eae940-22bc-4c87-842f-30fbd04eba28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722018] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 6fcfada3-d73a-4814-bf45-d34b26d76d4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722136] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance c39d9059-8da4-4c8d-99ab-d66b8445e7da actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722269] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46b794f6-e858-45e6-9977-98ab246482f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722394] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 58691f22-5acd-45db-b587-df784a000813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722508] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.722619] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 7c74cae9-1607-4928-a927-f0c8b86f7698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2058.848912] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Successfully created port: afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2058.979078] env[62405]: DEBUG nova.compute.manager [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2058.979136] env[62405]: DEBUG nova.compute.manager [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing instance network info cache due to event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2058.979403] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.979838] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.979838] env[62405]: DEBUG nova.network.neutron [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2059.037975] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948182, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.057464] env[62405]: DEBUG nova.compute.manager [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2059.057464] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2059.057464] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f6aec7a-4953-4c73-b11a-8e71fd2ae497 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.065562] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2059.065562] env[62405]: value = "task-1948183" [ 2059.065562] env[62405]: _type = "Task" [ 2059.065562] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.075133] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.228315] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2059.535582] env[62405]: DEBUG nova.network.neutron [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updated VIF entry in instance network info cache for port 3d9e960f-b38a-4714-93c0-7ff8857554fe. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2059.535980] env[62405]: DEBUG nova.network.neutron [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating instance_info_cache with network_info: [{"id": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "address": "fa:16:3e:4e:fa:e7", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d9e960f-b3", "ovs_interfaceid": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.543765] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948182, 'name': CreateVM_Task, 'duration_secs': 0.585214} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.543765] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2059.544591] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.545894] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.545894] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2059.545894] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84373792-8a60-4ff1-aa29-125a5943675d {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.550506] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2059.550506] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d45ac2-c3b5-6ee2-ecc6-53deed241c31" [ 2059.550506] env[62405]: _type = "Task" [ 2059.550506] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.565293] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d45ac2-c3b5-6ee2-ecc6-53deed241c31, 'name': SearchDatastore_Task, 'duration_secs': 0.010775} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.565690] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.565854] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2059.566046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.566196] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.566377] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2059.566688] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-446bc8be-56a5-4f9b-b5de-314b7ae14325 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.577522] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948183, 'name': PowerOffVM_Task, 'duration_secs': 0.228127} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.578658] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2059.578869] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2059.579084] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401571', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'name': 'volume-10498adc-afa1-4e8d-87d5-9511db990a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c39d9059-8da4-4c8d-99ab-d66b8445e7da', 'attached_at': '2024-12-21T03:30:52.000000', 'detached_at': '', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'serial': '10498adc-afa1-4e8d-87d5-9511db990a6a'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2059.579383] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2059.579541] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2059.580709] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1cdccf-090d-4442-b64a-e015c51b5815 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.583117] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb4353f3-d8c6-4f42-9325-31db4b9be23d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.588540] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2059.588540] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52975f9f-0e6a-0a4a-e330-d9c5a9f1bde7" [ 2059.588540] env[62405]: _type = "Task" [ 2059.588540] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.610483] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0514727f-173c-408d-822a-b62de9e5ec49 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.618340] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52975f9f-0e6a-0a4a-e330-d9c5a9f1bde7, 'name': SearchDatastore_Task, 'duration_secs': 0.009081} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.620575] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1e43e4e-3e6a-4dcb-b701-22db714386dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.623037] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a63d63f-7195-46e7-a2a1-3fc2cca8b77a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.628015] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2059.628015] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a27ac-77fd-5982-8502-138f3db0ab02" [ 2059.628015] env[62405]: _type = "Task" [ 2059.628015] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.652458] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c9bbde-f88e-4f5c-9ecd-0feed957187b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.658340] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a27ac-77fd-5982-8502-138f3db0ab02, 'name': SearchDatastore_Task, 'duration_secs': 0.011021} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.658937] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.659214] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f1e9a2e7-0fd3-4a89-8c33-bab6d1987230/f1e9a2e7-0fd3-4a89-8c33-bab6d1987230.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2059.659454] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9c60aa2-17b4-4e1d-bfa1-41310f9bbf3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.671743] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] The volume has not been displaced from its original location: [datastore1] volume-10498adc-afa1-4e8d-87d5-9511db990a6a/volume-10498adc-afa1-4e8d-87d5-9511db990a6a.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2059.677299] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2059.677983] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6051f305-d489-4ece-9c5f-db360a468349 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.696107] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2059.696107] env[62405]: value = "task-1948184" [ 2059.696107] env[62405]: _type = "Task" [ 2059.696107] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.699501] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2059.701375] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2059.701375] env[62405]: value = "task-1948185" [ 2059.701375] env[62405]: _type = "Task" [ 2059.701375] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.707900] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.713078] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948185, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.736362] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 34f4f278-bd4d-43f9-af83-adb48cfb0adc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2059.736362] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2059.736362] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2059.739829] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2059.740097] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2059.740258] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2059.740438] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2059.740600] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2059.740745] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2059.740946] env[62405]: DEBUG nova.virt.hardware [None 
req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2059.744029] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2059.744029] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2059.744029] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2059.744029] env[62405]: DEBUG nova.virt.hardware [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2059.744029] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb287e36-689f-460f-8e3d-edeb350af28e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.753256] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88135eb5-8e50-4e27-a54c-0c5fd8a829b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.772803] env[62405]: DEBUG nova.network.neutron [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updated VIF entry in instance network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2059.773203] env[62405]: DEBUG nova.network.neutron [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.954261] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ea7ac7-bf94-439a-bb73-fc47e032b291 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.966662] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b299f43-490c-4151-afe1-d6a0a1b8dbdb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.008789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1a9fc2-f3d2-4e30-bfa2-04b52c8373d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.018139] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea917aa-37e4-490b-8ea9-0687c79c64e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.033608] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2060.039300] env[62405]: DEBUG oslo_concurrency.lockutils [req-cc9533fa-6e47-4b47-a7b6-fd43d10ad95e req-c8b2f2b9-8726-4a5a-b838-03b8cba1d13a service nova] Releasing lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.204917] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 
tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511496} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.207846] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f1e9a2e7-0fd3-4a89-8c33-bab6d1987230/f1e9a2e7-0fd3-4a89-8c33-bab6d1987230.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2060.208084] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2060.208341] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d322a27f-367b-4bf7-8f89-4786d23303cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.215325] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948185, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.216541] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2060.216541] env[62405]: value = "task-1948186" [ 2060.216541] env[62405]: _type = "Task" [ 2060.216541] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.223811] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948186, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.276796] env[62405]: DEBUG oslo_concurrency.lockutils [req-1b56498e-d667-4c2d-9b88-5876bc2b0c58 req-bd29e0f4-6e84-4807-8d85-4356db0b3eeb service nova] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.328353] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "58691f22-5acd-45db-b587-df784a000813" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.328659] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.328912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "58691f22-5acd-45db-b587-df784a000813-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.329543] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.329543] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.331711] env[62405]: INFO nova.compute.manager [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Terminating instance [ 2060.536952] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2060.628396] env[62405]: DEBUG nova.compute.manager [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Received event network-vif-plugged-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2060.628622] env[62405]: DEBUG oslo_concurrency.lockutils [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.628829] env[62405]: DEBUG oslo_concurrency.lockutils [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.629007] env[62405]: DEBUG oslo_concurrency.lockutils [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.629258] env[62405]: DEBUG nova.compute.manager [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] No waiting events found dispatching network-vif-plugged-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2060.629446] env[62405]: WARNING nova.compute.manager [req-8c09e683-5b85-4a90-97ff-213a5321d388 req-b8abd758-ee9b-40ac-949e-e7cc2d562971 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Received unexpected event network-vif-plugged-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 for instance with vm_state building and task_state spawning. [ 2060.714139] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948185, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.725054] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081387} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.725333] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2060.726080] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f922e3-e8b5-45b3-ad15-4a6cb77ddf6f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.747546] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] f1e9a2e7-0fd3-4a89-8c33-bab6d1987230/f1e9a2e7-0fd3-4a89-8c33-bab6d1987230.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2060.748430] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Successfully updated port: afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2060.749577] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa4ec07a-ef5e-4aa0-a9d5-a926cd73b730 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.765454] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2060.765603] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.765790] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2060.772563] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2060.772563] env[62405]: value = "task-1948187" [ 2060.772563] env[62405]: _type = "Task" [ 2060.772563] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.781668] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948187, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.835820] env[62405]: DEBUG nova.compute.manager [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2060.836087] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2060.836917] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8014025d-66e1-42cb-b76d-6bf720df8cf7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.845174] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2060.845449] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55f95203-f9f9-461a-ae43-6c99fd62cca4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.852038] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2060.852038] env[62405]: value = "task-1948188" [ 2060.852038] env[62405]: _type = "Task" [ 2060.852038] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.859304] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948188, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.008117] env[62405]: DEBUG nova.compute.manager [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2061.008332] env[62405]: DEBUG nova.compute.manager [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing instance network info cache due to event network-changed-2ba16494-2db9-4083-9a27-d4f12dac6ba1. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2061.008549] env[62405]: DEBUG oslo_concurrency.lockutils [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] Acquiring lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.008697] env[62405]: DEBUG oslo_concurrency.lockutils [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] Acquired lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.008862] env[62405]: DEBUG nova.network.neutron [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Refreshing network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2061.044339] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2061.044664] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.365s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.044873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.226s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.046422] env[62405]: INFO nova.compute.claims [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2061.049266] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2061.049414] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 2061.215105] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948185, 'name': ReconfigVM_Task, 'duration_secs': 1.271334} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.215426] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2061.219976] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7656f5c2-cfa5-4b4b-b402-e938dd85b2b9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.234727] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2061.234727] env[62405]: value = "task-1948189" [ 2061.234727] env[62405]: _type = "Task" [ 2061.234727] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.243667] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948189, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.281233] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948187, 'name': ReconfigVM_Task, 'duration_secs': 0.442039} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.281518] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfigured VM instance instance-00000071 to attach disk [datastore1] f1e9a2e7-0fd3-4a89-8c33-bab6d1987230/f1e9a2e7-0fd3-4a89-8c33-bab6d1987230.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2061.282519] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cf1a2e6-d0d1-4f97-a28f-4814d7d38703 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.288023] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2061.288023] env[62405]: value = "task-1948190" [ 2061.288023] env[62405]: _type = "Task" [ 2061.288023] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.295218] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948190, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.296901] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2061.361346] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948188, 'name': PowerOffVM_Task, 'duration_secs': 0.333641} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.361655] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2061.361772] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2061.362009] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-707aae77-39ef-4442-a081-f3fc005fbca2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.424273] env[62405]: DEBUG nova.network.neutron [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating instance_info_cache with network_info: [{"id": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "address": "fa:16:3e:98:99:8d", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafd2e7f4-e2", "ovs_interfaceid": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.450380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2061.450727] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2061.450866] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 
tempest-ServerRescueTestJSON-1180955202-project-member] Deleting the datastore file [datastore1] 58691f22-5acd-45db-b587-df784a000813 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2061.451150] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4c6def4-a753-418e-8405-f57736de9785 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.458678] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2061.458678] env[62405]: value = "task-1948192" [ 2061.458678] env[62405]: _type = "Task" [ 2061.458678] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.466662] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.566353] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 58 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 2061.566353] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2e6ca3c8-d7e1-4a5d-9fdd-aa944fd9c2f8] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2061.735038] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f1b638-1831-4133-b8e4-42f58a4999b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.747875] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c63e73-e6a8-44b9-9fd8-c6f6005cacd3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.750795] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948189, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.752636] env[62405]: DEBUG nova.network.neutron [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updated VIF entry in instance network info cache for port 2ba16494-2db9-4083-9a27-d4f12dac6ba1. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2061.752636] env[62405]: DEBUG nova.network.neutron [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [{"id": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "address": "fa:16:3e:66:df:57", "network": {"id": "bb161d6c-1986-486e-a6b7-5b1dacc985ee", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-590658377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7541d8c77a3f434094bc30a4d402bfcb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba16494-2d", "ovs_interfaceid": "2ba16494-2db9-4083-9a27-d4f12dac6ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.783211] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff5bee2-ca99-48e6-bbc0-a128c4ac5dcc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.794264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9822301-746b-46f7-9c03-52f94e480963 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.802968] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948190, 'name': Rename_Task} progress is 14%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.810771] env[62405]: DEBUG nova.compute.provider_tree [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2061.926869] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2061.927225] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Instance network_info: |[{"id": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "address": "fa:16:3e:98:99:8d", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafd2e7f4-e2", "ovs_interfaceid": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2061.927751] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:99:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '950a2f67-7668-4376-9d48-b38dca033c40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2061.935376] env[62405]: DEBUG oslo.service.loopingcall [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf 
tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.935581] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2061.935804] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03c32cfa-63a9-4dba-aa36-c19705a77b6e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.955453] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2061.955453] env[62405]: value = "task-1948193" [ 2061.955453] env[62405]: _type = "Task" [ 2061.955453] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.968126] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948193, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.973033] env[62405]: DEBUG oslo_vmware.api [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151266} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.973276] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2061.973537] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2061.973790] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2061.974051] env[62405]: INFO nova.compute.manager [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: 58691f22-5acd-45db-b587-df784a000813] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2061.974557] env[62405]: DEBUG oslo.service.loopingcall [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.974667] env[62405]: DEBUG nova.compute.manager [-] [instance: 58691f22-5acd-45db-b587-df784a000813] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2061.974826] env[62405]: DEBUG nova.network.neutron [-] [instance: 58691f22-5acd-45db-b587-df784a000813] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2062.072534] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 798257f7-0590-4f82-82b0-d428cc6e6e92] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2062.248242] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948189, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.254958] env[62405]: DEBUG oslo_concurrency.lockutils [req-22b3a69b-5ed5-4ca0-a967-01040f7be582 req-4c321c54-cd96-4410-b8a4-da0adda922c4 service nova] Releasing lock "refresh_cache-81d9be97-9147-4754-80c2-68c1a389842e" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.300609] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948190, 'name': Rename_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.332748] env[62405]: ERROR nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [req-c8133e8b-cc50-48a1-b710-b82d95b87d1a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c8133e8b-cc50-48a1-b710-b82d95b87d1a"}]} [ 2062.348880] env[62405]: DEBUG nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2062.363400] env[62405]: DEBUG nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2062.363657] env[62405]: DEBUG nova.compute.provider_tree [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2062.374773] env[62405]: DEBUG nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2062.393899] env[62405]: DEBUG nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2062.469973] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948193, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.573597] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e381f3e-d269-4bbe-959e-9c3872e89738 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.576747] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 989a7146-71ea-433b-86f9-b7a0f0ee91b4] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2062.583469] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a42b20b-b33e-40e6-9f0f-1d0b87fb1db5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.614460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c2266e-412b-48f4-b5d6-616af8c59045 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.622552] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d55037-9c00-4d3d-acc4-532ef4e9b3d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.637119] env[62405]: DEBUG nova.compute.provider_tree [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2062.657054] env[62405]: DEBUG nova.compute.manager [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Received event network-changed-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2062.657195] env[62405]: DEBUG nova.compute.manager [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Refreshing instance network info cache due to event network-changed-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2062.657412] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] Acquiring lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.657633] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] Acquired lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.657856] env[62405]: DEBUG nova.network.neutron [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Refreshing network info cache for port afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2062.747256] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948189, 'name': ReconfigVM_Task, 'duration_secs': 1.078728} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.747544] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401571', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'name': 'volume-10498adc-afa1-4e8d-87d5-9511db990a6a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c39d9059-8da4-4c8d-99ab-d66b8445e7da', 'attached_at': '2024-12-21T03:30:52.000000', 'detached_at': '', 'volume_id': '10498adc-afa1-4e8d-87d5-9511db990a6a', 'serial': '10498adc-afa1-4e8d-87d5-9511db990a6a'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2062.747880] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2062.748617] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e15abaa-24ea-4851-8791-c7a446ead401 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.754964] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2062.755192] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e10b251-3e75-40e3-9944-0ddf903a70d5 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.800229] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948190, 'name': Rename_Task, 'duration_secs': 1.050555} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.800428] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2062.800647] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bede64a-2a57-45cb-82ea-eb72e47d1526 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.807219] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2062.807219] env[62405]: value = "task-1948195" [ 2062.807219] env[62405]: _type = "Task" [ 2062.807219] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.817012] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948195, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.830198] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2062.830413] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2062.830591] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleting the datastore file [datastore1] c39d9059-8da4-4c8d-99ab-d66b8445e7da {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2062.830849] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17042ad3-9a5a-405b-9bb0-2795c78b6669 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.836581] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2062.836581] env[62405]: value = "task-1948196" [ 2062.836581] env[62405]: _type = "Task" [ 2062.836581] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.844089] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.968914] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948193, 'name': CreateVM_Task, 'duration_secs': 0.671499} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.969141] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2062.969860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.970049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.970414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2062.970680] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89a84b04-0b75-482a-873b-b4b29f1aedf4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.975621] env[62405]: DEBUG nova.network.neutron [-] [instance: 58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.976835] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2062.976835] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52558fb8-e674-e737-ec0b-8a1f6c749e39" [ 2062.976835] env[62405]: _type = "Task" [ 2062.976835] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.985652] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52558fb8-e674-e737-ec0b-8a1f6c749e39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.079467] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 5645c9a4-2640-4190-956f-00fc2ea03a3a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2063.168685] env[62405]: DEBUG nova.scheduler.client.report [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2063.168967] env[62405]: DEBUG nova.compute.provider_tree [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 176 to 177 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2063.169173] env[62405]: DEBUG nova.compute.provider_tree [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2063.316842] env[62405]: DEBUG oslo_vmware.api [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948195, 'name': PowerOnVM_Task, 'duration_secs': 0.494021} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.317272] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2063.317332] env[62405]: INFO nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Took 7.70 seconds to spawn the instance on the hypervisor. 
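Most of the DEBUG traffic above follows one repeating pattern: a vCenter task is created (ReconfigVM_Task, PowerOffVM_Task, Rename_Task, CreateVM_Task, PowerOnVM_Task, ...), the caller logs "Waiting for the task", and the poller reports "progress is N%" until the task "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained sketch of that poll-until-done loop; TaskInfo, fetch_task_info and the interval/timeout values are hypothetical stand-ins for illustration only, not the oslo.vmware implementation used by this service.

import time
from dataclasses import dataclass
from typing import Callable

# Hypothetical stand-in for the task-status object the log keeps polling.
@dataclass
class TaskInfo:
    task_id: str
    state: str        # "running", "success" or "error"
    progress: int     # percentage reported while still running
    error: str = ""

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> float:
    """Poll a task until it finishes, mimicking the 'progress is N%' /
    'completed successfully' lines in the log. Returns the elapsed seconds."""
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        elapsed = time.monotonic() - start
        if info.state == "success":
            print(f"Task {info.task_id} completed successfully "
                  f"(duration_secs={elapsed:.6f})")
            return elapsed
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        if elapsed > timeout:
            raise TimeoutError(f"Task {info.task_id} still running after {elapsed:.1f}s")
        print(f"Task {info.task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)

# Toy demo: a fake task that reaches success on the third poll.
_states = iter([TaskInfo("task-1948190", "running", 14),
                TaskInfo("task-1948190", "running", 99),
                TaskInfo("task-1948190", "success", 100)])
wait_for_task(lambda: next(_states), poll_interval=0.0)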
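The ERROR at 2062.33 and its follow-up also show a complete conflict-and-retry cycle: the inventory update for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 was rejected with HTTP 409 (placement.concurrent_update, a resource provider generation conflict), the report client refreshed inventories, aggregates and traits, and the retried update then landed, moving the provider generation from 176 to 177. The sketch below illustrates that generation-guarded compare-and-swap retry in isolation; FakePlacement and its methods are invented stand-ins, not the real placement client API.

# Each successful write bumps the provider generation; a write carrying a
# stale generation is rejected, the caller re-reads, then retries.
class ConflictError(Exception):
    """Stands in for the HTTP 409 placement.concurrent_update response."""

class FakePlacement:
    def __init__(self):
        self.generation = 176
        self.inventory = {}

    def get_provider_generation(self, provider_uuid: str) -> int:
        return self.generation

    def put_inventory(self, provider_uuid: str, generation: int,
                      inventory: dict) -> int:
        if generation != self.generation:
            raise ConflictError("resource provider generation conflict")
        self.inventory = dict(inventory)
        self.generation += 1
        return self.generation

def set_inventory_with_retry(client: FakePlacement, provider_uuid: str,
                             inventory: dict, cached_generation: int,
                             max_attempts: int = 4) -> int:
    generation = cached_generation
    for attempt in range(1, max_attempts + 1):
        try:
            new_gen = client.put_inventory(provider_uuid, generation, inventory)
            print(f"updated inventory, provider generation {generation} -> {new_gen}")
            return new_gen
        except ConflictError:
            # Another writer got there first: refresh the generation and retry,
            # like the 'Refreshing inventories/aggregates/traits' lines above.
            generation = client.get_provider_generation(provider_uuid)
            print(f"409 conflict on attempt {attempt}; refreshed generation is {generation}")
    raise RuntimeError("gave up after repeated generation conflicts")

client = FakePlacement()   # server-side generation is 176
set_inventory_with_retry(client, "7d5eded7-a501-4fa6-b1d3-60e273d555d7",
                         {"VCPU": {"total": 48, "allocation_ratio": 4.0}},
                         cached_generation=175)   # our cached view is stale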
[ 2063.317482] env[62405]: DEBUG nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2063.318250] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5158a3f-a65c-414c-ba5f-ceb58c327834 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.345490] env[62405]: DEBUG oslo_vmware.api [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161513} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.345717] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2063.345902] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2063.346093] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2063.346265] env[62405]: INFO nova.compute.manager [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Took 4.29 seconds to destroy the instance on the hypervisor. [ 2063.346496] env[62405]: DEBUG oslo.service.loopingcall [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2063.346681] env[62405]: DEBUG nova.compute.manager [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2063.346781] env[62405]: DEBUG nova.network.neutron [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2063.398919] env[62405]: DEBUG nova.network.neutron [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updated VIF entry in instance network info cache for port afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2063.399324] env[62405]: DEBUG nova.network.neutron [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating instance_info_cache with network_info: [{"id": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "address": "fa:16:3e:98:99:8d", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafd2e7f4-e2", "ovs_interfaceid": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.479599] env[62405]: INFO nova.compute.manager [-] [instance: 58691f22-5acd-45db-b587-df784a000813] Took 1.50 seconds to deallocate network for instance. [ 2063.499544] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52558fb8-e674-e737-ec0b-8a1f6c749e39, 'name': SearchDatastore_Task, 'duration_secs': 0.009089} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.499544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.499544] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2063.499544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2063.499544] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.499544] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2063.499544] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de3e2de9-8d1a-4704-b722-40e1a9edde19 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.512378] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2063.512563] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2063.513337] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d462b78-2858-462e-9bf6-20c5d463b38f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.519225] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2063.519225] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cfd30-3cdc-0320-782b-7c64573db17a" [ 2063.519225] env[62405]: _type = "Task" [ 2063.519225] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.528017] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cfd30-3cdc-0320-782b-7c64573db17a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.583138] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d937c90c-10b2-4c57-b1db-7b433c3d9017] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2063.675325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.675861] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2063.678509] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.424s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.680173] env[62405]: INFO nova.compute.claims [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2063.833979] env[62405]: DEBUG nova.compute.manager [req-842bf099-d58d-4b18-998d-d52424040323 req-d8a538fc-bc51-4866-b56a-ae78786db4e4 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Received event network-vif-deleted-2026016a-87b1-42ae-a04f-d95c5fb37377 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2063.834208] env[62405]: INFO nova.compute.manager [req-842bf099-d58d-4b18-998d-d52424040323 req-d8a538fc-bc51-4866-b56a-ae78786db4e4 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Neutron deleted interface 2026016a-87b1-42ae-a04f-d95c5fb37377; detaching it from the instance and deleting it from the info cache [ 2063.834382] env[62405]: DEBUG nova.network.neutron [req-842bf099-d58d-4b18-998d-d52424040323 req-d8a538fc-bc51-4866-b56a-ae78786db4e4 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.839846] env[62405]: INFO nova.compute.manager [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Took 16.10 seconds to build instance. 
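Note: the surrounding entries repeat one pattern over and over: a lockutils lock is taken around the datastore image-cache path ("Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns"), a vCenter task is started (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ...), and oslo.vmware polls it until it reports "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained Python sketch of that lock-then-poll shape only; `locked`, `wait_for_task` and `FakeTask` are illustrative stand-ins invented for this sketch, not the actual oslo.concurrency or oslo.vmware APIs.

```python
# Illustrative sketch of the "acquire lock -> start task -> poll until done" pattern
# visible in this trace. All names here are hypothetical stand-ins.
import contextlib
import threading
import time

_LOCKS = {}  # name -> threading.Lock, like lockutils' per-name internal locks


@contextlib.contextmanager
def locked(name):
    """Mirror of the Acquiring/acquired/released lock lines in the log."""
    lock = _LOCKS.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}"')
    t0 = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        yield
    print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')


class FakeTask:
    """Stand-in for a vCenter task handle (e.g. a CopyVirtualDisk_Task)."""

    def __init__(self, progress_steps):
        self._steps = list(progress_steps)  # e.g. [0, 77, 100]

    def poll(self):
        # Each poll returns the next progress value, standing in for the
        # driver re-reading the task's progress from the server.
        return self._steps.pop(0) if self._steps else 100


def wait_for_task(task, poll_interval=0.1):
    """Poll until the task reports 100%, like the 'progress is N%' lines."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        print(f"Task progress is {progress}%.")
        if progress >= 100:
            break
        time.sleep(poll_interval)
    return time.monotonic() - start  # analogous to the logged duration_secs


if __name__ == "__main__":
    cache_path = "[datastore1] devstack-image-cache_base/<image-id>.vmdk"  # placeholder
    with locked(cache_path):
        duration = wait_for_task(FakeTask([0, 77, 100]))
    print(f"completed successfully. duration_secs={duration:.3f}")
```

The sketch only reproduces the observable shape of the log (lock held across the copy, task polled to completion, elapsed time reported); the real driver delegates all of this to oslo.concurrency and oslo.vmware as the module paths in the entries show.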
[ 2063.903254] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] Releasing lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.904119] env[62405]: DEBUG nova.compute.manager [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Received event network-vif-deleted-c38487c8-b41a-4c0c-8103-3392186dbdee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2063.904119] env[62405]: INFO nova.compute.manager [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Neutron deleted interface c38487c8-b41a-4c0c-8103-3392186dbdee; detaching it from the instance and deleting it from the info cache [ 2063.904119] env[62405]: DEBUG nova.network.neutron [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2063.994375] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.032706] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cfd30-3cdc-0320-782b-7c64573db17a, 'name': SearchDatastore_Task, 'duration_secs': 0.008405} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.034149] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31afe2cf-ea30-4a25-a8a8-13b2ad40f0cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.040355] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2064.040355] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d710f-06ba-d0cb-a4e3-97c4122c83be" [ 2064.040355] env[62405]: _type = "Task" [ 2064.040355] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.049067] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d710f-06ba-d0cb-a4e3-97c4122c83be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.086812] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 14512ed2-9eae-4753-b83c-8c0d0d5d9432] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2064.184552] env[62405]: DEBUG nova.compute.utils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2064.188700] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2064.188874] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2064.228934] env[62405]: DEBUG nova.policy [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9c18747ac7149dba0e1c0a8fc6c0b7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd9a1a4650b34e388c50c7575cf09a7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2064.307440] env[62405]: DEBUG nova.network.neutron [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.338178] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c683969-d122-486c-a31b-946b7c16371c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.345262] env[62405]: DEBUG oslo_concurrency.lockutils [None req-eb10086d-a8c6-46c3-9f57-3125ebe3729b tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.617s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.348371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e79126d-848a-4982-bc04-897f44bb8d8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.378949] env[62405]: DEBUG nova.compute.manager [req-842bf099-d58d-4b18-998d-d52424040323 
req-d8a538fc-bc51-4866-b56a-ae78786db4e4 service nova] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Detach interface failed, port_id=2026016a-87b1-42ae-a04f-d95c5fb37377, reason: Instance c39d9059-8da4-4c8d-99ab-d66b8445e7da could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2064.406402] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85d038a7-9dd6-4945-a31b-1020c97a98b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.415594] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc3a1b1-fb88-413f-9cd7-a0f39a9adace {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.446077] env[62405]: DEBUG nova.compute.manager [req-b1da4cc2-3863-41bc-8530-6c0c6f3b9d55 req-c42a373d-eee6-4a0b-82f8-c0ed335041b0 service nova] [instance: 58691f22-5acd-45db-b587-df784a000813] Detach interface failed, port_id=c38487c8-b41a-4c0c-8103-3392186dbdee, reason: Instance 58691f22-5acd-45db-b587-df784a000813 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2064.506720] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Successfully created port: c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2064.553145] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523d710f-06ba-d0cb-a4e3-97c4122c83be, 'name': SearchDatastore_Task, 'duration_secs': 0.009255} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.553376] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.553628] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7c74cae9-1607-4928-a927-f0c8b86f7698/7c74cae9-1607-4928-a927-f0c8b86f7698.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2064.553974] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a939755-1371-4564-b96b-38b273a34b72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.560408] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2064.560408] env[62405]: value = "task-1948197" [ 2064.560408] env[62405]: _type = "Task" [ 2064.560408] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.568317] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.590544] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15718289-5c19-4c2d-a9d8-d30ce0d63c68] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2064.691710] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2064.795412] env[62405]: DEBUG nova.compute.manager [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Received event network-changed-3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2064.795964] env[62405]: DEBUG nova.compute.manager [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Refreshing instance network info cache due to event network-changed-3d9e960f-b38a-4714-93c0-7ff8857554fe. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2064.795964] env[62405]: DEBUG oslo_concurrency.lockutils [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] Acquiring lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.796165] env[62405]: DEBUG oslo_concurrency.lockutils [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] Acquired lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.796297] env[62405]: DEBUG nova.network.neutron [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Refreshing network info cache for port 3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2064.812652] env[62405]: INFO nova.compute.manager [-] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Took 1.46 seconds to deallocate network for instance. [ 2064.903925] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ed6b14-c809-43da-8b84-8b6177bb57ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.913598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69c4503-d7dd-4291-b250-e8e0fcc12892 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.951892] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b1814b-3e9a-49b4-83a5-a0bb36c8e030 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.960651] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9895339b-b991-45f5-ba7a-4a61c29eb70c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.977008] env[62405]: DEBUG nova.compute.provider_tree [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2065.072574] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948197, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.093887] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 1b820a12-4ca5-4b89-9016-81ebac4f1c3b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2065.361323] env[62405]: INFO nova.compute.manager [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Took 0.55 seconds to detach 1 volumes for instance. [ 2065.481023] env[62405]: DEBUG nova.scheduler.client.report [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2065.564280] env[62405]: DEBUG nova.network.neutron [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updated VIF entry in instance network info cache for port 3d9e960f-b38a-4714-93c0-7ff8857554fe. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2065.566133] env[62405]: DEBUG nova.network.neutron [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating instance_info_cache with network_info: [{"id": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "address": "fa:16:3e:4e:fa:e7", "network": {"id": "2019f333-b70a-4976-97ee-8748220e1f48", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-558435229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "633b4e729a054bc69593b789af9ee070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d9e960f-b3", "ovs_interfaceid": "3d9e960f-b38a-4714-93c0-7ff8857554fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.575709] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 
tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530881} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.575978] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 7c74cae9-1607-4928-a927-f0c8b86f7698/7c74cae9-1607-4928-a927-f0c8b86f7698.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2065.576386] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2065.576470] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d64d63d7-5038-4425-80cb-a7b9ec3eb761 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.584477] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2065.584477] env[62405]: value = "task-1948198" [ 2065.584477] env[62405]: _type = "Task" [ 2065.584477] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.593700] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948198, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.597463] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ca0ff947-1ae0-4f19-ae71-0784f2c20ebe] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2065.707640] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2065.734503] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2065.734756] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2065.734918] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2065.735115] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2065.735264] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2065.735519] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2065.735741] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2065.735905] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2065.736090] env[62405]: DEBUG nova.virt.hardware [None 
req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2065.736259] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2065.736433] env[62405]: DEBUG nova.virt.hardware [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2065.737411] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f3436c-7055-4d90-a449-a159981055d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.745248] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8add6e8c-d24d-4f74-afff-42e76e6e0e21 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.872075] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.929142] env[62405]: DEBUG nova.compute.manager [req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Received event network-vif-plugged-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2065.929323] env[62405]: DEBUG oslo_concurrency.lockutils [req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] Acquiring lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.929537] env[62405]: DEBUG oslo_concurrency.lockutils [req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.929718] env[62405]: DEBUG oslo_concurrency.lockutils [req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.929909] env[62405]: DEBUG nova.compute.manager 
[req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] No waiting events found dispatching network-vif-plugged-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2065.930121] env[62405]: WARNING nova.compute.manager [req-f250803f-cfe6-4962-9074-e442c563ca3d req-937a06e9-22be-4ee9-b1b4-70899c011e71 service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Received unexpected event network-vif-plugged-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe for instance with vm_state building and task_state spawning. [ 2065.984918] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.985476] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2065.988232] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.994s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.988581] env[62405]: DEBUG nova.objects.instance [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lazy-loading 'resources' on Instance uuid 58691f22-5acd-45db-b587-df784a000813 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2066.021433] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Successfully updated port: c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2066.071243] env[62405]: DEBUG oslo_concurrency.lockutils [req-97ba3bd8-d046-45f6-b6e6-b26ba0f4a4c5 req-0bafadb7-c0ca-46fa-82b0-9f1fbd52589e service nova] Releasing lock "refresh_cache-f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.094796] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066599} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.095421] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2066.097027] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f7b377-efd1-460c-8887-feb0c5721759 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.110579] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 1f8293f9-5fba-4bf4-bf7c-65837c1092a0] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2066.121066] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 7c74cae9-1607-4928-a927-f0c8b86f7698/7c74cae9-1607-4928-a927-f0c8b86f7698.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2066.121208] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f638a4c-8ee4-4229-af09-12b19ed88f7a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.142014] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2066.142014] env[62405]: value = "task-1948199" [ 2066.142014] env[62405]: _type = "Task" [ 2066.142014] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.151866] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948199, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.496384] env[62405]: DEBUG nova.compute.utils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2066.500812] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Not allocating networking since 'none' was specified. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2066.523957] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.524170] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.524355] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2066.622118] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ec0a05fc-4a11-4e07-a03c-e357a7a750ab] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2066.654879] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.673515] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5577b02d-deaf-4835-ad50-c3b5e64e6d8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.681419] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423ed4a2-72b7-4cf3-b92c-b4b96cca4607 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.712489] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ef6e96-9e4b-4b2e-b15f-8ec2a05acaa1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.720291] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2d11c5-2456-4d63-99bb-189a32461040 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.733550] env[62405]: DEBUG nova.compute.provider_tree [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.001873] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 
34f4f278-bd4d-43f9-af83-adb48cfb0adc] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2067.056255] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2067.128245] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 556e1bca-f2f1-4200-96df-997d48ce5a15] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2067.153276] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948199, 'name': ReconfigVM_Task, 'duration_secs': 0.753063} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.155027] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 7c74cae9-1607-4928-a927-f0c8b86f7698/7c74cae9-1607-4928-a927-f0c8b86f7698.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2067.155027] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-128eb27e-79f5-4a95-a6d3-7ec550b121b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.167063] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2067.167063] env[62405]: value = "task-1948200" [ 2067.167063] env[62405]: _type = "Task" [ 2067.167063] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.176593] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948200, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.190197] env[62405]: DEBUG nova.network.neutron [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updating instance_info_cache with network_info: [{"id": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "address": "fa:16:3e:4f:28:77", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc50bcbe3-9e", "ovs_interfaceid": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.236765] env[62405]: DEBUG nova.scheduler.client.report [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2067.631450] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f269844b-a9b4-40a2-8ba4-a62ee59b4e40] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2067.677269] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948200, 'name': Rename_Task, 'duration_secs': 0.132795} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.677547] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2067.677816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3f4fbf9-2b24-47f0-a666-99dfe5cb2877 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.683943] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2067.683943] env[62405]: value = "task-1948201" [ 2067.683943] env[62405]: _type = "Task" [ 2067.683943] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.691515] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.693020] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.693346] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance network_info: |[{"id": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "address": "fa:16:3e:4f:28:77", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc50bcbe3-9e", "ovs_interfaceid": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2067.693880] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:28:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2067.701814] env[62405]: DEBUG oslo.service.loopingcall [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.702024] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2067.702263] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ff6551d-e6f1-4c67-a7ef-76509a879c28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.722362] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2067.722362] env[62405]: value = "task-1948202" [ 2067.722362] env[62405]: _type = "Task" [ 2067.722362] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.730024] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948202, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.741967] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.754s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.744414] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.872s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.744813] env[62405]: DEBUG nova.objects.instance [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'resources' on Instance uuid c39d9059-8da4-4c8d-99ab-d66b8445e7da {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2067.764853] env[62405]: INFO nova.scheduler.client.report [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Deleted allocations for instance 58691f22-5acd-45db-b587-df784a000813 [ 2067.957149] env[62405]: DEBUG nova.compute.manager [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Received event network-changed-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2067.957386] env[62405]: DEBUG nova.compute.manager [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Refreshing instance network info cache due to event network-changed-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2067.958040] env[62405]: DEBUG oslo_concurrency.lockutils [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] Acquiring lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2067.958040] env[62405]: DEBUG oslo_concurrency.lockutils [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] Acquired lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.958247] env[62405]: DEBUG nova.network.neutron [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Refreshing network info cache for port c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2068.012757] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2068.042504] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2068.042795] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2068.043073] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2068.043195] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2068.043357] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 
tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2068.043498] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2068.043709] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2068.043871] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2068.044059] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2068.044231] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2068.044410] env[62405]: DEBUG nova.virt.hardware [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2068.045306] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a117bd7b-f182-4fe5-9508-d5cf88bdcfc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.053627] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a13f595-c965-47af-b994-c54ea32df18d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.068098] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2068.073775] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Creating folder: Project (38404b347f6c40d19cfd264d79a1d827). Parent ref: group-v401284. 
{{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2068.074150] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8076d707-9b3e-41d1-9c05-e968b9c7ee0b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.084822] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Created folder: Project (38404b347f6c40d19cfd264d79a1d827) in parent group-v401284. [ 2068.085015] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Creating folder: Instances. Parent ref: group-v401593. {{(pid=62405) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2068.085270] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7bf9e02-2a8b-4936-ab7c-57c563cc4d2e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.095051] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Created folder: Instances in parent group-v401593. [ 2068.095369] env[62405]: DEBUG oslo.service.loopingcall [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.095520] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2068.095734] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91ee9247-c161-42ad-8918-372ad4ef0b25 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.113271] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2068.113271] env[62405]: value = "task-1948205" [ 2068.113271] env[62405]: _type = "Task" [ 2068.113271] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.122825] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948205, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.134615] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3b9a6a82-a426-4802-9640-5b39e5e0ff49] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2068.194528] env[62405]: DEBUG oslo_vmware.api [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948201, 'name': PowerOnVM_Task, 'duration_secs': 0.47514} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.194871] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2068.195157] env[62405]: INFO nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Took 8.50 seconds to spawn the instance on the hypervisor. [ 2068.195375] env[62405]: DEBUG nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2068.196260] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07085653-ebc3-4ef4-8a29-f4b98dd57567 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.234023] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948202, 'name': CreateVM_Task, 'duration_secs': 0.368879} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.234126] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2068.234789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.235042] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.235418] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2068.235704] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4571c83c-8838-492d-83f9-50617f449eae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.240934] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2068.240934] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d026c6-f65b-89cb-d77f-1a579e483f17" [ 2068.240934] env[62405]: _type = "Task" [ 2068.240934] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.251989] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d026c6-f65b-89cb-d77f-1a579e483f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.273952] env[62405]: DEBUG oslo_concurrency.lockutils [None req-280b76ad-aa61-45bf-96d6-d80b2055c2c9 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "58691f22-5acd-45db-b587-df784a000813" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.945s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.403035] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ceb122-3bad-4f4d-bb24-e11c24dcaf9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.410820] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f4b43d-c184-4f14-86ed-5d47a0c01a0c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.442423] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e672d1e-93e2-4494-9a84-ffbd8c649ddb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.449803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befca1c8-9c45-436b-b107-0929d3232fc8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.464122] env[62405]: DEBUG nova.compute.provider_tree [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.622703] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948205, 'name': CreateVM_Task, 'duration_secs': 0.255054} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.622903] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2068.623390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.638277] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 41e5385d-f0c7-4431-8424-e60dbeebaf8e] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2068.660386] env[62405]: DEBUG nova.network.neutron [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updated VIF entry in instance network info cache for port c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2068.660747] env[62405]: DEBUG nova.network.neutron [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updating instance_info_cache with network_info: [{"id": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "address": "fa:16:3e:4f:28:77", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc50bcbe3-9e", "ovs_interfaceid": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.715311] env[62405]: INFO nova.compute.manager [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Took 20.33 seconds to build instance. 
[ 2068.753044] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d026c6-f65b-89cb-d77f-1a579e483f17, 'name': SearchDatastore_Task, 'duration_secs': 0.012001} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2068.753044] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.753044] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2068.753241] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2068.753241] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.753392] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2068.753675] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2068.753992] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2068.754237] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ea6f455-421b-479e-b957-28bb6cc6a298 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.755987] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fe786fe-d28b-43e4-83ba-1ffb040b6fe1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.761175] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2068.761175] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ffe48b-1141-1330-7831-955e6a545dc8" [ 2068.761175] env[62405]: _type = "Task" [ 2068.761175] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.764932] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2068.765129] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2068.766111] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4259d230-2fe6-467b-b24b-6914b044c707 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.771114] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ffe48b-1141-1330-7831-955e6a545dc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.774131] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2068.774131] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f37b6c-15f1-9c82-db9e-94babc09a5d6" [ 2068.774131] env[62405]: _type = "Task" [ 2068.774131] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.781543] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f37b6c-15f1-9c82-db9e-94babc09a5d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.967515] env[62405]: DEBUG nova.scheduler.client.report [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2069.141384] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 78c3e3a3-3ff3-4f9d-ab0d-32a9a86bfc9d] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2069.163355] env[62405]: DEBUG oslo_concurrency.lockutils [req-51306664-f413-4626-a0f9-52f44281ecdf req-9573fea1-d405-444a-8c31-f045ab3f8dcb service nova] Releasing lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.217436] env[62405]: DEBUG oslo_concurrency.lockutils [None req-5587a345-b62d-4ff8-8dfd-a9a77e080dcf tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.843s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.273324] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52ffe48b-1141-1330-7831-955e6a545dc8, 'name': SearchDatastore_Task, 'duration_secs': 0.009507} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.273324] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.273324] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2069.273324] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.282544] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f37b6c-15f1-9c82-db9e-94babc09a5d6, 'name': SearchDatastore_Task, 'duration_secs': 0.007561} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.283284] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2da9881f-5787-4390-afe1-3d2b6dfb2fae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.288701] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2069.288701] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c4945-e2a6-1946-8e78-a3bd21603347" [ 2069.288701] env[62405]: _type = "Task" [ 2069.288701] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.295730] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c4945-e2a6-1946-8e78-a3bd21603347, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.333793] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "b2eae940-22bc-4c87-842f-30fbd04eba28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.334037] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.334367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.334443] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.334615] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.338618] env[62405]: INFO nova.compute.manager [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Terminating instance [ 2069.439899] env[62405]: DEBUG nova.compute.manager [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Received event network-changed-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2069.440139] env[62405]: DEBUG nova.compute.manager [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Refreshing instance network info cache due to event network-changed-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2069.440366] env[62405]: DEBUG oslo_concurrency.lockutils [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] Acquiring lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.440517] env[62405]: DEBUG oslo_concurrency.lockutils [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] Acquired lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.440711] env[62405]: DEBUG nova.network.neutron [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Refreshing network info cache for port afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2069.473070] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.498528] env[62405]: INFO nova.scheduler.client.report [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted allocations for instance c39d9059-8da4-4c8d-99ab-d66b8445e7da [ 2069.644737] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 06dbb3e0-876e-4290-81f5-6f95f9d5cb37] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2069.799727] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]526c4945-e2a6-1946-8e78-a3bd21603347, 'name': SearchDatastore_Task, 'duration_secs': 0.017671} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.800370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.800644] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2069.800958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.801186] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2069.801408] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-809c3b3a-7cf1-4a69-a567-0bbd0db2a28c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.803259] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbf3dded-619f-4de9-b7d8-192c1c8da714 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.809887] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2069.809887] env[62405]: value = "task-1948206" [ 2069.809887] env[62405]: _type = "Task" [ 2069.809887] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.813587] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2069.813726] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2069.814687] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7caddfd1-7b66-4944-8529-d962a4c8e0a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.820044] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948206, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.822788] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2069.822788] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4752-d131-4cf7-d71a-d7c7d40cdb53" [ 2069.822788] env[62405]: _type = "Task" [ 2069.822788] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.829559] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4752-d131-4cf7-d71a-d7c7d40cdb53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.842270] env[62405]: DEBUG nova.compute.manager [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2069.842467] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2069.843197] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9b8ae0-d3a2-4a5e-9f97-e85072c8094a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.849592] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2069.849818] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbe28a61-ab43-4a39-97a7-1fc646fe1be9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.855798] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2069.855798] env[62405]: value = "task-1948207" [ 2069.855798] env[62405]: _type = "Task" [ 2069.855798] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.862954] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.008643] env[62405]: DEBUG oslo_concurrency.lockutils [None req-11bb5068-76a3-46e0-9fac-6acb19c8e9d8 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "c39d9059-8da4-4c8d-99ab-d66b8445e7da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.462s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.148565] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 08d7be6c-0557-46af-ae8d-e1c68e878cae] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2070.319858] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948206, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.331368] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527f4752-d131-4cf7-d71a-d7c7d40cdb53, 'name': SearchDatastore_Task, 'duration_secs': 0.008782} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.332150] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-775283ae-231b-4afc-8b0e-c18b6c0fc01c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.337328] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2070.337328] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5252bec1-0aab-20ee-7f54-89d10f4785a5" [ 2070.337328] env[62405]: _type = "Task" [ 2070.337328] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.341023] env[62405]: DEBUG nova.network.neutron [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updated VIF entry in instance network info cache for port afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2070.341373] env[62405]: DEBUG nova.network.neutron [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating instance_info_cache with network_info: [{"id": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "address": "fa:16:3e:98:99:8d", "network": {"id": "dfd15c21-b7a5-492a-afe3-8eb96515351d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-175692276-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6014bab6bc9a4b059bab88e44b31f446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "950a2f67-7668-4376-9d48-b38dca033c40", "external-id": "nsx-vlan-transportzone-549", "segmentation_id": 549, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafd2e7f4-e2", "ovs_interfaceid": "afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.345133] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': 
session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5252bec1-0aab-20ee-7f54-89d10f4785a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.363943] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948207, 'name': PowerOffVM_Task, 'duration_secs': 0.297934} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.364215] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2070.364389] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2070.364623] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5dc4cfbf-c431-449d-b71d-34d7230e871f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.652180] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 59fe34ab-c01d-4083-8bcd-ad6b4133a66f] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2070.820699] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519976} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.820956] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2070.821185] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2070.821446] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c133a4a-b517-481a-8c44-78054c5cf057 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.828292] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2070.828292] env[62405]: value = "task-1948209" [ 2070.828292] env[62405]: _type = "Task" [ 2070.828292] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.836823] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.847115] env[62405]: DEBUG oslo_concurrency.lockutils [req-91f6eeef-fa7f-45b2-a18f-9878f023add6 req-d1d05fb0-7108-4445-8e5f-cdbeaeef7c39 service nova] Releasing lock "refresh_cache-7c74cae9-1607-4928-a927-f0c8b86f7698" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.847516] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5252bec1-0aab-20ee-7f54-89d10f4785a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01308} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.847767] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.848044] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2070.848306] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8204ebd6-39c3-4075-9565-7401bc86821e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.854657] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2070.854657] env[62405]: value = "task-1948210" [ 2070.854657] env[62405]: _type = "Task" [ 2070.854657] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.863480] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948210, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2070.992717] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2070.992948] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2070.993148] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Deleting the datastore file [datastore1] b2eae940-22bc-4c87-842f-30fbd04eba28 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2070.993424] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39bb7b44-1fca-4266-9577-d13f1c58582f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.999919] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for the task: (returnval){ [ 2070.999919] env[62405]: value = "task-1948211" [ 2070.999919] env[62405]: _type = "Task" [ 2070.999919] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.007916] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948211, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.157222] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8f133517-cff2-40c7-8333-a9116163313a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2071.337964] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081749} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.340055] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2071.340055] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c12bc2-1d89-44d9-bf70-0145ff750988 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.363800] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2071.367299] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a539da7a-d82b-4a4b-86de-454b10f47420 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.388322] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948210, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.389778] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2071.389778] env[62405]: value = "task-1948212" [ 2071.389778] env[62405]: _type = "Task" [ 2071.389778] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.397583] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948212, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.472949] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.473206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.509378] env[62405]: DEBUG oslo_vmware.api [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Task: {'id': task-1948211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.493404} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.509637] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2071.509881] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2071.510092] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2071.510285] env[62405]: INFO nova.compute.manager [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Took 1.67 seconds to destroy the instance on the hypervisor. [ 2071.510548] env[62405]: DEBUG oslo.service.loopingcall [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2071.510771] env[62405]: DEBUG nova.compute.manager [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2071.510867] env[62405]: DEBUG nova.network.neutron [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2071.658716] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 81aebf11-5d80-4a86-b232-3ecc5f3892c2] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2071.763851] env[62405]: DEBUG nova.compute.manager [req-2f47b966-4638-4f98-8876-4392facf81f6 req-825ba4c5-a6b4-43d1-87dd-0b22f361cbe3 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Received event network-vif-deleted-14628f58-ebd5-4e11-8089-8c15cde335af {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2071.764065] env[62405]: INFO nova.compute.manager [req-2f47b966-4638-4f98-8876-4392facf81f6 req-825ba4c5-a6b4-43d1-87dd-0b22f361cbe3 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Neutron deleted interface 14628f58-ebd5-4e11-8089-8c15cde335af; detaching it from the instance and deleting it from the info cache [ 2071.764248] env[62405]: DEBUG nova.network.neutron [req-2f47b966-4638-4f98-8876-4392facf81f6 req-825ba4c5-a6b4-43d1-87dd-0b22f361cbe3 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.864314] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553283} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.864556] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2071.864764] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2071.865019] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41477dff-5f15-4f2a-9090-d993f126dec2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.871223] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2071.871223] env[62405]: value = "task-1948213" [ 2071.871223] env[62405]: _type = "Task" [ 2071.871223] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.878669] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948213, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.900872] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948212, 'name': ReconfigVM_Task, 'duration_secs': 0.452089} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.901256] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2071.901920] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-860105c8-017d-4092-bb46-5ee58c842f65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.908403] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2071.908403] env[62405]: value = "task-1948214" [ 2071.908403] env[62405]: _type = "Task" [ 2071.908403] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.916678] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948214, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.975711] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2072.162543] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 86378df0-a658-427d-aca5-de25f84eb28b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2072.242209] env[62405]: DEBUG nova.network.neutron [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2072.266793] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7a7192e-f62d-4b15-890d-8fd592e8c330 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.277494] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78333a6-c095-4ba1-9985-8c6cb87a5dc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.309073] env[62405]: DEBUG nova.compute.manager [req-2f47b966-4638-4f98-8876-4392facf81f6 req-825ba4c5-a6b4-43d1-87dd-0b22f361cbe3 service nova] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Detach interface failed, port_id=14628f58-ebd5-4e11-8089-8c15cde335af, reason: Instance b2eae940-22bc-4c87-842f-30fbd04eba28 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2072.380899] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948213, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082857} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.381196] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2072.381947] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f5f842-2789-450e-b196-c10de218bf36 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.400977] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2072.401235] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-754657cd-7452-41c0-a990-2d110f90eda7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.422833] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948214, 'name': Rename_Task, 'duration_secs': 0.181678} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.423976] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2072.424341] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2072.424341] env[62405]: value = "task-1948215" [ 2072.424341] env[62405]: _type = "Task" [ 2072.424341] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.424523] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcf56f81-6e9d-4d8d-9257-2f264ace4443 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.434012] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948215, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.434707] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2072.434707] env[62405]: value = "task-1948216" [ 2072.434707] env[62405]: _type = "Task" [ 2072.434707] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.441641] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.502043] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.502043] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.504162] env[62405]: INFO nova.compute.claims [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.666052] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 171910d2-02b8-4219-ae75-5cecccea1de3] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2072.743965] env[62405]: INFO nova.compute.manager [-] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Took 1.23 seconds to deallocate network for instance. [ 2072.935977] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948215, 'name': ReconfigVM_Task, 'duration_secs': 0.509659} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.940695] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2072.941323] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7067cc03-b579-446e-a668-ea0127527b43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.949099] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948216, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.950687] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2072.950687] env[62405]: value = "task-1948217" [ 2072.950687] env[62405]: _type = "Task" [ 2072.950687] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.960377] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948217, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.169694] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 65cd4af4-30cf-4435-8f32-501db450905f] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2073.250088] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.445238] env[62405]: DEBUG oslo_vmware.api [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948216, 'name': PowerOnVM_Task, 'duration_secs': 0.615782} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.445513] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2073.445723] env[62405]: INFO nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Took 7.74 seconds to spawn the instance on the hypervisor. [ 2073.445900] env[62405]: DEBUG nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2073.446677] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f52743f-d01e-422e-877f-893f9d46f616 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.462230] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948217, 'name': Rename_Task, 'duration_secs': 0.450896} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.462462] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2073.462674] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5729469c-659a-4b11-8e19-e450fae74d3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.467843] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2073.467843] env[62405]: value = "task-1948218" [ 2073.467843] env[62405]: _type = "Task" [ 2073.467843] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.474843] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948218, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.648935] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98fb8fd-7934-46ad-8664-80f1e3410747 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.656249] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c903a1-62cf-4a5a-9af8-abd7d4b8ebe0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.685743] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 271cec64-e7b4-4a1b-a7d6-f3fd60086209] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2073.688182] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604e0061-1a90-40af-a448-6099092e52da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.695431] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec647d06-6872-4440-8f93-68a702f42c8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.710017] env[62405]: DEBUG nova.compute.provider_tree [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2073.967882] env[62405]: INFO nova.compute.manager [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Took 22.18 seconds to build instance. [ 2073.977702] env[62405]: DEBUG oslo_vmware.api [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948218, 'name': PowerOnVM_Task, 'duration_secs': 0.495057} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.977967] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2073.978246] env[62405]: INFO nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Took 5.97 seconds to spawn the instance on the hypervisor. 
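[annotation] The spawn sequence logged above (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows one pattern: each vCenter operation returns a task reference that is polled via oslo.vmware until it completes. The following is a minimal sketch of that submit-and-wait pattern, not the Nova driver code itself; the vCenter host, credentials, and the power_on_vm helper name are placeholders, while VMwareAPISession.invoke_api() and wait_for_task() are the oslo.vmware calls the log's wait_for_task/_poll_task entries come from.

    # Illustrative sketch only: submit a vCenter task and block until it finishes,
    # mirroring the "Waiting for the task ... progress is N% ... completed
    # successfully" entries above. Placeholder endpoint and credentials.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',        # assumption: placeholder vCenter endpoint
        'administrator',          # assumption: placeholder username
        'secret',                 # assumption: placeholder password
        10,                       # api_retry_count
        0.5)                      # task_poll_interval (seconds)

    def power_on_vm(session, vm_ref):
        # PowerOnVM_Task returns a task moref; wait_for_task polls it and
        # raises if the task ends in an error state.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)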
[ 2073.978446] env[62405]: DEBUG nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2073.979330] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c370361-79f5-4eb1-b6b0-3ba3e0093ae1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.192045] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 73c5b28f-d21d-4ffc-9e67-911e4fb4db66] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2074.229926] env[62405]: ERROR nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [req-52f7a68f-9167-4fca-921f-929be89746a8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-52f7a68f-9167-4fca-921f-929be89746a8"}]} [ 2074.253770] env[62405]: DEBUG nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2074.267714] env[62405]: DEBUG nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2074.268010] env[62405]: DEBUG nova.compute.provider_tree [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2074.279735] env[62405]: DEBUG nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2074.298078] env[62405]: DEBUG nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2074.455876] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb83b25-50a9-4107-96bb-3526b18f0a12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.463680] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58437bf7-4ff3-408a-9203-4139f8223a65 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.495357] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d3ff1748-df43-405d-b9bb-f4ac134f9ba5 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.721s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.500196] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1c3720-ea87-4acd-85ac-139d19cbc376 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.508930] env[62405]: INFO nova.compute.manager [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Took 20.28 seconds to build instance. 
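[annotation] The 409 "placement.concurrent_update" error above, followed by the inventory/aggregate/trait refresh and the provider generation moving from 178 to 179, is the standard generation-guarded update cycle against the Placement API: a PUT carrying a stale resource_provider_generation is rejected, the client re-reads the provider to pick up the new generation, and retries. The sketch below shows only the shape of that exchange; the placement URL and token are placeholders, and Nova's real client is nova.scheduler.client.report, not this helper.

    # Illustrative sketch of a generation-guarded inventory update with retry
    # on 409 placement.concurrent_update. Placeholder endpoint and token.
    import requests

    PLACEMENT_URL = 'http://placement.example.test/placement'   # assumption
    HEADERS = {'X-Auth-Token': 'TOKEN',                          # assumption
               'OpenStack-API-Version': 'placement 1.26'}

    def set_inventory(rp_uuid, inventories, attempts=3):
        url = f'{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories'
        for _ in range(attempts):
            # Re-read the provider's current inventory to learn its generation.
            current = requests.get(url, headers=HEADERS).json()
            body = {'resource_provider_generation':
                        current['resource_provider_generation'],
                    'inventories': inventories}
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409: another writer bumped the generation; loop and retry
            # with the freshly fetched value.
        raise RuntimeError('inventory update kept conflicting for ' + rp_uuid)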
[ 2074.510806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4335be18-d683-453c-bf12-ec498e20982b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.528237] env[62405]: DEBUG nova.compute.provider_tree [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2074.548541] env[62405]: DEBUG nova.compute.manager [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Received event network-changed-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2074.549674] env[62405]: DEBUG nova.compute.manager [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Refreshing instance network info cache due to event network-changed-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2074.549674] env[62405]: DEBUG oslo_concurrency.lockutils [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] Acquiring lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.549674] env[62405]: DEBUG oslo_concurrency.lockutils [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] Acquired lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.549674] env[62405]: DEBUG nova.network.neutron [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Refreshing network info cache for port c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2074.695703] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b4b89cf6-4159-40fa-8b67-4d8bbf16eb32] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2075.018644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2dd168af-0bc3-4c07-9d00-1f1141995121 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.797s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.059385] 
env[62405]: DEBUG nova.scheduler.client.report [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 178 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2075.059685] env[62405]: DEBUG nova.compute.provider_tree [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 178 to 179 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2075.059951] env[62405]: DEBUG nova.compute.provider_tree [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2075.113012] env[62405]: INFO nova.compute.manager [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Rebuilding instance [ 2075.156158] env[62405]: DEBUG nova.compute.manager [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2075.157203] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba7d512-b211-477e-a461-a7f59b7045ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.198793] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 00158b10-4292-48f3-85a0-991af1dbc5f1] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2075.335137] env[62405]: DEBUG nova.network.neutron [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updated VIF entry in instance network info cache for port c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2075.335531] env[62405]: DEBUG nova.network.neutron [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updating instance_info_cache with network_info: [{"id": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "address": "fa:16:3e:4f:28:77", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc50bcbe3-9e", "ovs_interfaceid": "c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2075.564495] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.063s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.565021] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2075.567689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.318s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.567934] env[62405]: DEBUG nova.objects.instance [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lazy-loading 'resources' on Instance uuid b2eae940-22bc-4c87-842f-30fbd04eba28 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2075.702990] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d4d3e7b4-70cb-4842-861f-4e01b5ce6e6d] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2075.839266] env[62405]: DEBUG oslo_concurrency.lockutils [req-fa88117e-6b9b-4859-be08-c5f11ed79310 req-adfb3149-e892-4cff-a744-a02e906532ab service nova] Releasing lock "refresh_cache-60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.996638] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.996900] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.071222] env[62405]: DEBUG nova.compute.utils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2076.076025] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2076.076025] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2076.115661] env[62405]: DEBUG nova.policy [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ad5e220132245168b59ff3df599b974', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3b50cc219314108945bfc8b2c21849a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2076.174284] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2076.174678] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8eaf67c-2b63-4f26-a904-342f30f19a9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.182902] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2076.182902] env[62405]: value = "task-1948219" [ 2076.182902] env[62405]: _type = "Task" [ 2076.182902] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.191110] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.208029] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: af174cbf-3555-42b0-bacd-033f9ff46f08] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2076.254878] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc5f487-29b7-4ac8-962d-763ed6fb8973 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.265401] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a140c11-f49e-4ebf-9e84-5db4190a5c4f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.295335] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b1dc16-df24-44f5-9894-a2868742725f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.302691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9197cfab-40c9-4b3c-9b5b-c0ab57006543 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.317133] env[62405]: DEBUG nova.compute.provider_tree [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.413644] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Successfully created port: 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2076.501999] env[62405]: INFO nova.compute.manager [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Detaching volume c64ac26e-4f56-4aad-931f-053141f488c8 [ 2076.537837] env[62405]: INFO nova.virt.block_device [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Attempting to driver detach volume c64ac26e-4f56-4aad-931f-053141f488c8 from mountpoint /dev/sdb [ 2076.538458] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2076.538884] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401573', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'name': 'volume-c64ac26e-4f56-4aad-931f-053141f488c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6fcfada3-d73a-4814-bf45-d34b26d76d4a', 'attached_at': '', 'detached_at': '', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'serial': 'c64ac26e-4f56-4aad-931f-053141f488c8'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2076.540234] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f85f662-8d0d-41d4-bd25-687698929397 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.566643] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fdbab2-e1f9-4224-905c-54af5c8ed90b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.574069] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029ae531-a7d9-4aef-a36b-2947d5189382 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.576876] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2076.598737] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa04c84-29f2-422e-a606-b960736f9656 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.613413] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] The volume has not been displaced from its original location: [datastore1] volume-c64ac26e-4f56-4aad-931f-053141f488c8/volume-c64ac26e-4f56-4aad-931f-053141f488c8.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2076.618953] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2076.620331] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f88b13ed-74d6-470a-a166-53abf60aa4f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.639884] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2076.639884] env[62405]: value = "task-1948220" [ 2076.639884] env[62405]: _type = "Task" [ 2076.639884] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.648558] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948220, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.691917] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948219, 'name': PowerOffVM_Task, 'duration_secs': 0.201792} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.692270] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.692548] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2076.693301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89941e04-eaeb-4656-a501-e51fe6eb5feb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.699430] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.699639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-135aa750-6bdf-4c63-bca0-23b5b5459fff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.711963] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2ab5f28c-1f71-4bea-8733-523e5570f5c6] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2076.724819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.725123] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.725332] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Deleting the datastore file [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.725571] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f0753c1-3d49-4925-a3a4-621280ef481b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.731721] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2076.731721] env[62405]: 
value = "task-1948222" [ 2076.731721] env[62405]: _type = "Task" [ 2076.731721] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.742326] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.823049] env[62405]: DEBUG nova.scheduler.client.report [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2077.150106] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948220, 'name': ReconfigVM_Task, 'duration_secs': 0.337624} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.150360] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2077.155151] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f89c8762-7812-47be-a140-6e70a70d3366 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.170268] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2077.170268] env[62405]: value = "task-1948223" [ 2077.170268] env[62405]: _type = "Task" [ 2077.170268] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.177854] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948223, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.216909] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f16e3d13-6db6-4f61-b0e4-661856a9166b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2077.241405] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104727} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.241647] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.241836] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2077.242018] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2077.327614] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.348901] env[62405]: INFO nova.scheduler.client.report [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Deleted allocations for instance b2eae940-22bc-4c87-842f-30fbd04eba28 [ 2077.586590] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2077.612623] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2077.612901] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2077.613118] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2077.613341] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2077.613498] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2077.613650] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2077.613859] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2077.614031] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2077.614204] env[62405]: DEBUG 
nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2077.614368] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2077.614542] env[62405]: DEBUG nova.virt.hardware [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2077.615415] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3ccbfb-e5b8-490c-8d26-f12f63221c62 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.623255] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055516e4-dbd3-430b-b7d2-c15de7940d1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.679062] env[62405]: DEBUG oslo_vmware.api [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948223, 'name': ReconfigVM_Task, 'duration_secs': 0.20411} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.679516] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401573', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'name': 'volume-c64ac26e-4f56-4aad-931f-053141f488c8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '6fcfada3-d73a-4814-bf45-d34b26d76d4a', 'attached_at': '', 'detached_at': '', 'volume_id': 'c64ac26e-4f56-4aad-931f-053141f488c8', 'serial': 'c64ac26e-4f56-4aad-931f-053141f488c8'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2077.720150] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: ff8731d6-3c55-4ddc-aeb1-308d72313881] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2077.856757] env[62405]: DEBUG oslo_concurrency.lockutils [None req-42955616-c215-4fc4-b03c-07e53959c079 tempest-ServerRescueTestJSON-1180955202 tempest-ServerRescueTestJSON-1180955202-project-member] Lock "b2eae940-22bc-4c87-842f-30fbd04eba28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.523s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.222626] env[62405]: DEBUG nova.objects.instance [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'flavor' on Instance uuid 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2078.224126] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9d97bf1d-6830-48b1-831b-bf2b52188f32] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2078.276242] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2078.276544] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe 
tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2078.276714] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2078.276900] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2078.277063] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2078.277216] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2078.277424] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2078.277590] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2078.277763] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2078.277988] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2078.278193] env[62405]: DEBUG nova.virt.hardware [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2078.279669] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f9cd09-4b63-40e7-9a15-a61013d0ab7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.288117] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5242ecf6-1713-4571-9838-a295a3892395 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.303307] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance VIF info [] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2078.308970] env[62405]: DEBUG oslo.service.loopingcall [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2078.309250] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2078.309469] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd27ef96-d3e0-4610-b5c8-8dd59ec0ee32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.327488] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2078.327488] env[62405]: value = "task-1948224" [ 2078.327488] env[62405]: _type = "Task" [ 2078.327488] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.335388] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948224, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.729048] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 153adb6e-5381-4e91-881e-8e566a16905a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2078.838033] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948224, 'name': CreateVM_Task, 'duration_secs': 0.255073} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.838207] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2078.838630] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.838796] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.839230] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2078.839488] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47dfef45-2094-41ee-9c9b-7987070bdf00 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.844725] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2078.844725] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a06965-1713-dda4-359f-77d4e185e741" [ 2078.844725] env[62405]: _type = "Task" [ 2078.844725] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.852322] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a06965-1713-dda4-359f-77d4e185e741, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.232962] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 48554024-9b6f-44be-b21e-615b25cd790c] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2079.235103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-cb6d4ec4-d9dc-435d-bc51-0c640fed14c2 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.238s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.355136] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a06965-1713-dda4-359f-77d4e185e741, 'name': SearchDatastore_Task, 'duration_secs': 0.022462} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.355501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.355752] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2079.356012] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.356185] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.356372] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2079.356662] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6776b5ef-8a66-4183-80b4-ebfba0b603d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
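The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries above (lockutils.py:310/313/331) and the "acquired ... :: waited" / "released ... :: held" timing entries (lockutils.py:402/407/421) come from oslo.concurrency's named-lock helpers, which Nova uses here to serialize work on the image cache and on per-instance state. A minimal sketch of the two idioms, with placeholder lock names and bodies, assuming only that oslo.concurrency is installed:

    from oslo_concurrency import lockutils

    # Context-manager form: logs "Acquiring lock" / "Acquired lock" /
    # "Releasing lock" around the body (as seen for the image-cache paths).
    with lockutils.lock("devstack-image-cache_base/<image-id>.vmdk"):
        print("only one worker touches this cache entry at a time")

    # Decorator form: its wrapper logs the "acquired ... :: waited" and
    # "released ... :: held" timing lines (as seen for "compute_resources").
    @lockutils.synchronized("compute_resources")
    def update_usage():
        print("resource tracker update runs under the named lock")

    update_usage()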
[ 2079.365057] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2079.365238] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2079.365923] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e92d838e-7b7a-4761-83d0-04f196aa38ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.370871] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2079.370871] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52af5c7a-dfad-4e3e-61ec-599db8c58225" [ 2079.370871] env[62405]: _type = "Task" [ 2079.370871] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.378448] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52af5c7a-dfad-4e3e-61ec-599db8c58225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.737375] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9aa9e0de-7314-4d8b-8e9f-b6d330cae914] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2079.881504] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52af5c7a-dfad-4e3e-61ec-599db8c58225, 'name': SearchDatastore_Task, 'duration_secs': 0.00786} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.882392] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a515159-52aa-4d3a-8e4b-69228239ae5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.888237] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2079.888237] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522123cd-6ca8-f865-5599-a9d785e0153c" [ 2079.888237] env[62405]: _type = "Task" [ 2079.888237] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.896214] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522123cd-6ca8-f865-5599-a9d785e0153c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.241353] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 7256b956-e41a-40ec-a687-a129a8bafcb6] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2080.256984] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.257252] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.257473] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.257663] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.257857] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.259929] env[62405]: INFO nova.compute.manager [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Terminating instance [ 2080.398799] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe 
tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522123cd-6ca8-f865-5599-a9d785e0153c, 'name': SearchDatastore_Task, 'duration_secs': 0.009366} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.399062] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.399512] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2080.399639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-625b3335-fd42-4ac0-8dcc-b0268c894642 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.406943] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2080.406943] env[62405]: value = "task-1948225" [ 2080.406943] env[62405]: _type = "Task" [ 2080.406943] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.417738] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948225, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.440997] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Successfully updated port: 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2080.474903] env[62405]: DEBUG nova.compute.manager [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2080.474903] env[62405]: DEBUG oslo_concurrency.lockutils [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.475293] env[62405]: DEBUG oslo_concurrency.lockutils [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.475411] env[62405]: DEBUG oslo_concurrency.lockutils [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.475630] env[62405]: DEBUG nova.compute.manager [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] No waiting events found dispatching network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2080.475835] env[62405]: WARNING nova.compute.manager [req-43cd21f2-1709-4fb5-9415-0c5e997f155c req-b3d0965a-baa7-477b-8a1b-bbdc80a2af5b service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received unexpected event network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f for instance with vm_state building and task_state spawning. [ 2080.744473] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f410acd2-f786-43bd-ad60-0a6248dedb1c] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2080.766965] env[62405]: DEBUG nova.compute.manager [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2080.767587] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2080.768186] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50457019-7364-4388-9f95-4107d3ed7761 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.778463] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2080.778679] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d4f2bec-3e5a-420f-8cc0-f477a4325eff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.786672] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2080.786672] env[62405]: value = "task-1948226" [ 2080.786672] env[62405]: _type = "Task" [ 2080.786672] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.795622] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.919654] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511134} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.919946] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2080.920192] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2080.920460] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77688091-756f-45b0-80f2-f107cca10a1b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.927547] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2080.927547] env[62405]: value = "task-1948227" [ 2080.927547] env[62405]: _type = "Task" [ 2080.927547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.936259] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948227, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.945248] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2080.945559] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2080.945850] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2081.248122] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 742c8d94-48d1-4408-91dc-98f25661aa8d] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2081.296292] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948226, 'name': PowerOffVM_Task, 'duration_secs': 0.441178} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.296606] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2081.296826] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2081.297116] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1627637d-f2c4-41d8-8b47-79405d1bf259 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.437025] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079026} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.437256] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2081.438006] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2512d8a-f9f9-43b6-a628-da5594b75b6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.458685] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2081.458955] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd9b1494-b2af-467c-bd30-ac21532af9bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.477709] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2081.477709] env[62405]: value = "task-1948229" [ 2081.477709] env[62405]: _type = "Task" [ 2081.477709] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.486660] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.498924] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2081.628667] env[62405]: DEBUG nova.network.neutron [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.751320] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9b21fa71-8a0e-446a-9492-59e2b068237c] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2081.878413] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2081.878667] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2081.878818] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2081.879110] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be1a9b54-d9c6-43e4-a607-792154033425 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.886301] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 
tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2081.886301] env[62405]: value = "task-1948230" [ 2081.886301] env[62405]: _type = "Task" [ 2081.886301] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.894353] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.987337] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948229, 'name': ReconfigVM_Task, 'duration_secs': 0.311596} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.987623] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc/34f4f278-bd4d-43f9-af83-adb48cfb0adc.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2081.988356] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b173d7e-fea0-4437-929e-aeb0a0551466 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.995087] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2081.995087] env[62405]: value = "task-1948231" [ 2081.995087] env[62405]: _type = "Task" [ 2081.995087] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.003428] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948231, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.131958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.132371] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance network_info: |[{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2082.132811] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:f0:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277de975-3957-41da-9e47-47a0be7e666f', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.140350] env[62405]: DEBUG oslo.service.loopingcall [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.140583] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2082.140805] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63eef978-bcf1-4fe7-85d1-2ac1b5d8a064 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.160347] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.160347] env[62405]: value = "task-1948232" [ 2082.160347] env[62405]: _type = "Task" [ 2082.160347] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.167792] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948232, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.255563] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46240f5b-c6ab-481b-b20c-80cc727a79f4] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2082.395582] env[62405]: DEBUG oslo_vmware.api [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353897} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.395857] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2082.396059] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2082.396269] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2082.396453] env[62405]: INFO nova.compute.manager [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Took 1.63 seconds to destroy the instance on the hypervisor. 
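
The repeated `wait_for_task` / `_poll_task` lines above (task-1948230 through task-1948233, each reported at 0%, 89%, then "completed successfully" with a `duration_secs`) all come from the same polling pattern: the driver submits a vCenter task, then repeatedly polls its progress until it reaches a terminal state. The snippet below is a minimal, hypothetical sketch of that pattern for illustration only; it is not the oslo.vmware implementation, and the names `FakeTask`, `poll`, and `poll_interval` are invented stand-ins, not real oslo.vmware APIs.

```python
# Illustrative sketch only: mimics the wait_for_task/_poll_task loop seen in
# the log (poll progress until success or error). FakeTask is a stand-in for
# a vCenter task handle; it is NOT part of oslo.vmware.
import time


class FakeTask:
    """Simulated task that completes after a fixed number of polls."""

    def __init__(self, steps=3):
        self._steps = steps
        self._polls = 0

    def poll(self):
        # Each poll reports increasing progress until the task completes.
        self._polls += 1
        if self._polls >= self._steps:
            return {"state": "success", "progress": 100}
        return {"state": "running",
                "progress": int(100 * self._polls / self._steps)}


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it succeeds or fails, logging progress each round."""
    while True:
        info = task.poll()
        print(f"Task progress is {info['progress']}%.")
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask(), poll_interval=0.01)
```

In the real log the equivalent loop runs inside oslo.vmware's API session (the `_poll_task` frames shown above), and the `duration_secs` value printed on completion corresponds to how long that loop ran before the task reached its terminal state.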
[ 2082.397026] env[62405]: DEBUG oslo.service.loopingcall [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.397026] env[62405]: DEBUG nova.compute.manager [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2082.397026] env[62405]: DEBUG nova.network.neutron [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2082.504878] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948231, 'name': Rename_Task, 'duration_secs': 0.144062} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.506196] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2082.507378] env[62405]: DEBUG nova.compute.manager [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-changed-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2082.507565] env[62405]: DEBUG nova.compute.manager [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing instance network info cache due to event network-changed-277de975-3957-41da-9e47-47a0be7e666f. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2082.507776] env[62405]: DEBUG oslo_concurrency.lockutils [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.507950] env[62405]: DEBUG oslo_concurrency.lockutils [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.508135] env[62405]: DEBUG nova.network.neutron [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing network info cache for port 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2082.509519] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57a19d1b-6769-4fd1-a2a5-640281510117 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.516904] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2082.516904] env[62405]: value = "task-1948233" [ 2082.516904] env[62405]: _type = "Task" [ 2082.516904] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.526107] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948233, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.670634] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948232, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.759597] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: d186b2f4-3fd1-44be-b8a4-080972aff3a0] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2083.027558] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948233, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.171144] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948232, 'name': CreateVM_Task, 'duration_secs': 0.58179} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.171278] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2083.182792] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.183311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.183311] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2083.183617] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5479fb48-7589-4d6c-bc32-0b8d8e7ea5a3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.189013] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2083.189013] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52392524-9d24-046e-34f9-dfc8721d3e51" [ 2083.189013] env[62405]: _type = "Task" [ 2083.189013] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.199403] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52392524-9d24-046e-34f9-dfc8721d3e51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.225633] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2083.225633] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.232240] env[62405]: DEBUG nova.network.neutron [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updated VIF entry in instance network info cache for port 277de975-3957-41da-9e47-47a0be7e666f. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2083.232599] env[62405]: DEBUG nova.network.neutron [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.262584] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a1a84837-deef-4ffc-8a47-4891bfc2c87a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2083.348039] env[62405]: DEBUG nova.network.neutron [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.527719] env[62405]: DEBUG oslo_vmware.api [None 
req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948233, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.699399] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52392524-9d24-046e-34f9-dfc8721d3e51, 'name': SearchDatastore_Task, 'duration_secs': 0.015296} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.699711] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.699959] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.700239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.700410] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.700602] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.700996] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a09d467-c783-4bf1-bf57-ce8fab2378bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.727281] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2083.734612] env[62405]: DEBUG oslo_concurrency.lockutils [req-f90063f2-098b-4291-a6a5-10b21f091ad9 req-65dbc955-99ab-4109-97f3-d0c3aece704c service nova] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.765678] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 79548471-56f8-410c-a664-d2242541cd2a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2083.850749] env[62405]: INFO nova.compute.manager [-] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Took 1.45 seconds to deallocate network for instance. [ 2084.031022] env[62405]: DEBUG oslo_vmware.api [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948233, 'name': PowerOnVM_Task, 'duration_secs': 1.273936} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.031422] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2084.031733] env[62405]: DEBUG nova.compute.manager [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2084.032848] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c73d30a-3627-4b3b-ad23-7e708725c9ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.249893] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.250257] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.251772] env[62405]: INFO nova.compute.claims [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2084.268886] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: dbb5dda5-5420-4d7b-8b32-152d51cb2fb9] Instance has had 0 of 5 
cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2084.357408] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.422173] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2084.422409] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2084.423416] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53100d69-ad79-47c2-8406-dc9afa135581 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.429417] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2084.429417] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dab66-fc64-23db-ba73-e8ff3853e6cb" [ 2084.429417] env[62405]: _type = "Task" [ 2084.429417] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.438297] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dab66-fc64-23db-ba73-e8ff3853e6cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.528058] env[62405]: DEBUG nova.compute.manager [req-416ff49b-9ac3-469b-92a9-89c64def0864 req-c2a9e2ce-2ad2-4d3f-ab25-d890c0abf41f service nova] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Received event network-vif-deleted-e84f02c8-cde2-4f59-88cd-ef80e8cc1bba {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2084.549873] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.772231] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9b495caf-4394-40c0-b68f-d02c7d759a6a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2084.920782] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.921137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.921391] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.921599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.921780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.924293] env[62405]: INFO nova.compute.manager [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 
tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Terminating instance [ 2084.940604] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dab66-fc64-23db-ba73-e8ff3853e6cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.275359] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 4c8c0d2f-d8d3-4422-8a5c-8999636b22be] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2085.421360] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb6caf-9ab4-4a5b-b0be-850b562cf034 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.428606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "refresh_cache-34f4f278-bd4d-43f9-af83-adb48cfb0adc" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.428782] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquired lock "refresh_cache-34f4f278-bd4d-43f9-af83-adb48cfb0adc" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.428960] env[62405]: DEBUG nova.network.neutron [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2085.430839] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8209b34d-0996-46e9-afbb-7d87a05e4e16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.467706] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]521dab66-fc64-23db-ba73-e8ff3853e6cb, 'name': SearchDatastore_Task, 'duration_secs': 0.734207} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.469176] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48544043-949e-46c6-aa96-55837f3c3f9d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.471667] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ff873b5-aeb6-4a7d-923f-9a64eb48cd17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.480054] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2085.480054] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523a708e-79e1-052a-1de6-2db70091a468" [ 2085.480054] env[62405]: _type = "Task" [ 2085.480054] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.480995] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d712934d-2cf6-4cfa-bb01-0dc239924ee1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.497750] env[62405]: DEBUG nova.compute.provider_tree [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2085.502473] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523a708e-79e1-052a-1de6-2db70091a468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.781088] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 2c623c00-92f2-4cc4-8503-963c3308d708] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2085.951861] env[62405]: DEBUG nova.network.neutron [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2085.995039] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]523a708e-79e1-052a-1de6-2db70091a468, 'name': SearchDatastore_Task, 'duration_secs': 0.017687} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.995267] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.995523] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2085.995774] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12a1662a-a5d5-4ca7-8656-45580647224d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.002386] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2086.002386] env[62405]: value = "task-1948234" [ 2086.002386] env[62405]: _type = "Task" [ 2086.002386] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.008941] env[62405]: DEBUG nova.network.neutron [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.014308] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948234, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.024009] env[62405]: ERROR nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [req-73999f32-dcad-44e0-a438-7d64b8966049] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-73999f32-dcad-44e0-a438-7d64b8966049"}]} [ 2086.039069] env[62405]: DEBUG nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2086.053741] env[62405]: DEBUG nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2086.054014] env[62405]: DEBUG nova.compute.provider_tree [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2086.065282] env[62405]: DEBUG nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2086.084393] env[62405]: DEBUG nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2086.235438] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c29f18-c6ab-4c08-92c3-fa49e79dcb4d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.244383] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2f9125-054d-4ad8-b434-0e286a19f7bc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.279078] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852ad942-8941-4be8-9e6d-802108ccb556 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.287893] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: aae3abca-951a-4149-9ccb-d70bea218aea] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2086.291052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cedca5-65b1-447a-8b96-dcf5fdc65a03 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.306724] env[62405]: DEBUG nova.compute.provider_tree [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2086.515320] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Releasing lock "refresh_cache-34f4f278-bd4d-43f9-af83-adb48cfb0adc" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2086.515320] env[62405]: DEBUG nova.compute.manager [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2086.515320] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2086.515599] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510714} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.516255] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d54bac6-b529-4ccc-823a-9efdf0bf7faa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.519670] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2086.519982] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2086.520478] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ba1a8c0-3393-4801-87fa-6f42e40ea30e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.526288] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2086.527422] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-914a2d8a-8a06-48d7-a6d5-09464aec0ab6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.528914] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2086.528914] env[62405]: value = "task-1948235" [ 2086.528914] env[62405]: _type = "Task" [ 2086.528914] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.533550] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2086.533550] env[62405]: value = "task-1948236" [ 2086.533550] env[62405]: _type = "Task" [ 2086.533550] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.539061] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948235, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.543862] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948236, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.796511] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 34ec55c6-1a7a-4ffa-8efd-9eedd7495d44] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2086.835231] env[62405]: DEBUG nova.scheduler.client.report [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2086.835512] env[62405]: DEBUG nova.compute.provider_tree [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 180 to 181 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2086.835697] env[62405]: DEBUG nova.compute.provider_tree [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2087.041086] env[62405]: 
DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948235, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075333} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.041086] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2087.041746] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8b9fbe-3a25-41d8-a6ed-e26fad1302a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.046701] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948236, 'name': PowerOffVM_Task, 'duration_secs': 0.117175} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.047390] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2087.047690] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2087.048044] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d655ea49-ad2f-41b8-bdef-bbe0bc99b699 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.066020] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2087.066725] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6909492b-4ca8-4c1b-bf91-ddf0afd0170b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.088839] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2087.088839] env[62405]: value = "task-1948238" [ 2087.088839] env[62405]: _type = "Task" [ 2087.088839] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.089592] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2087.089790] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2087.089992] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Deleting the datastore file [datastore1] 34f4f278-bd4d-43f9-af83-adb48cfb0adc {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2087.092841] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93f4e02a-0ea7-4b92-a299-c3a178ee0910 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.100517] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948238, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.101649] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for the task: (returnval){ [ 2087.101649] env[62405]: value = "task-1948239" [ 2087.101649] env[62405]: _type = "Task" [ 2087.101649] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.108621] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948239, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.304469] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 3c9487ff-2092-4cde-82d5-b38e5bc5c6e3] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2087.340950] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.091s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.341530] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2087.344783] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.988s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.345240] env[62405]: DEBUG nova.objects.instance [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'resources' on Instance uuid 6fcfada3-d73a-4814-bf45-d34b26d76d4a {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2087.599596] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948238, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.610572] env[62405]: DEBUG oslo_vmware.api [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Task: {'id': task-1948239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151389} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.610819] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2087.611041] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2087.611236] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2087.611413] env[62405]: INFO nova.compute.manager [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Took 1.10 seconds to destroy the instance on the hypervisor. [ 2087.611651] env[62405]: DEBUG oslo.service.loopingcall [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.611840] env[62405]: DEBUG nova.compute.manager [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2087.611933] env[62405]: DEBUG nova.network.neutron [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2087.632579] env[62405]: DEBUG nova.network.neutron [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance cache missing network info. {{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2087.806577] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 4d59d9fd-23df-4933-97ed-32602e51e9aa] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2087.846319] env[62405]: DEBUG nova.compute.utils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2087.848365] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2087.848547] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2087.907220] env[62405]: DEBUG nova.policy [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4ac1534df994c18bad62ec85acbc69f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a423f493034065bb1591d14d215ed8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2088.010575] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab77f7-acf4-4124-b490-c7b3d3f0765b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.017868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80caf2f8-bc73-476a-9112-1b3b776df56d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.047024] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02dba74-445e-49a8-b26a-c2808aa2fdbb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.054404] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152e595a-1f0e-4b20-9783-4345e45e11d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.068873] env[62405]: DEBUG nova.compute.provider_tree [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2088.101317] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948238, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.135332] env[62405]: DEBUG nova.network.neutron [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.174537] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Successfully created port: 6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2088.309617] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 67bf25ea-5774-4246-a3e6-2aeb0ebf6731] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2088.349297] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2088.572042] env[62405]: DEBUG nova.scheduler.client.report [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2088.602567] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948238, 'name': ReconfigVM_Task, 'duration_secs': 1.512134} completed successfully. 
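The inventory payload reported a few entries above for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 is what bounds scheduling decisions; under Placement's usual capacity rule, the schedulable amount per resource class is (total - reserved) * allocation_ratio. A quick worked check of the figures in the log:

```python
# Schedulable capacity per resource class, assuming the standard Placement rule:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```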
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.602992] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Reconfigured VM instance instance-00000075 to attach disk [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2088.603799] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-064a17c2-9c76-476d-ab6c-5baa1f72a861 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.615405] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2088.615405] env[62405]: value = "task-1948240" [ 2088.615405] env[62405]: _type = "Task" [ 2088.615405] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.640027] env[62405]: INFO nova.compute.manager [-] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Took 1.03 seconds to deallocate network for instance. [ 2088.640202] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948240, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.812965] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 78b4c6ea-6f5b-40d8-8c4a-10332f176e0b] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2089.077488] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.080057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.530s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.080491] env[62405]: DEBUG nova.objects.instance [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2089.100226] env[62405]: INFO nova.scheduler.client.report [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba 
tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted allocations for instance 6fcfada3-d73a-4814-bf45-d34b26d76d4a [ 2089.125631] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948240, 'name': Rename_Task, 'duration_secs': 0.164724} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.125903] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2089.126159] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd034750-153b-4db7-99e5-6c711b2f80c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.132876] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2089.132876] env[62405]: value = "task-1948241" [ 2089.132876] env[62405]: _type = "Task" [ 2089.132876] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.148887] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.153350] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.317495] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 15218373-ffa5-49ce-b604-423b7fc5fb35] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2089.357204] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2089.383389] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2089.383646] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2089.383810] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2089.383993] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2089.384157] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2089.384308] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2089.384517] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2089.384676] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2089.384842] env[62405]: DEBUG 
nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2089.385013] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2089.385201] env[62405]: DEBUG nova.virt.hardware [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2089.386281] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6e6a88-c10a-4118-8290-c56e8d4fd845 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.395552] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d68fa92-d71e-4a67-a92f-70e0987b4747 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.545082] env[62405]: DEBUG nova.compute.manager [req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Received event network-vif-plugged-6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2089.545327] env[62405]: DEBUG oslo_concurrency.lockutils [req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.545688] env[62405]: DEBUG oslo_concurrency.lockutils [req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.545785] env[62405]: DEBUG oslo_concurrency.lockutils [req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.545944] env[62405]: DEBUG nova.compute.manager [req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] No waiting events found dispatching network-vif-plugged-6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2089.546154] env[62405]: WARNING nova.compute.manager 
[req-5b0727bb-af32-48fa-8d17-0a3599d0f5b0 req-df4f94e0-7e9a-4d5e-b9e0-36b7d3f517fe service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Received unexpected event network-vif-plugged-6abc3e61-4638-4911-b589-f37ab143d2b1 for instance with vm_state building and task_state spawning. [ 2089.608433] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d15ecd0b-3041-44f2-82e6-8d58165672ba tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "6fcfada3-d73a-4814-bf45-d34b26d76d4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.351s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.643719] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948241, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.820766] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 777ddb84-25b9-4da6-be6b-a2289dbf510a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2089.832273] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Successfully updated port: 6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2090.089538] env[62405]: DEBUG oslo_concurrency.lockutils [None req-10d6fc18-bac1-40a8-9cc2-96b978b9edbe tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.090736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.937s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.090982] env[62405]: DEBUG nova.objects.instance [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lazy-loading 'resources' on Instance uuid 34f4f278-bd4d-43f9-af83-adb48cfb0adc {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2090.143935] env[62405]: DEBUG oslo_vmware.api [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948241, 'name': PowerOnVM_Task, 'duration_secs': 0.524215} completed successfully. 
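Earlier in this stretch the hardware code negotiates a CPU topology for the m1.nano flavor: flavor and image limits are 0:0:0, the maximums default to 65536 sockets/cores/threads, and for 1 vCPU exactly one topology (1 socket x 1 core x 1 thread) survives. The snippet below enumerates candidate topologies the simple way, by exact product match within the limits; it reproduces the logged result for this flavor but is only an illustrative stand-in, not Nova's actual hardware.py algorithm.

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Naive enumeration: every (sockets, cores, threads) whose product equals
    the vCPU count and stays within the per-dimension maximums."""
    found = []
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...
```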
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.144223] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2090.144425] env[62405]: INFO nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Took 12.56 seconds to spawn the instance on the hypervisor. [ 2090.144604] env[62405]: DEBUG nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2090.145354] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a4b9dd-f574-4e9c-b7e3-f5f17214a147 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.324597] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 058682a1-5240-4414-9203-c612ecd12999] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2090.337238] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.337390] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.337583] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2090.664300] env[62405]: INFO nova.compute.manager [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Took 18.18 seconds to build instance. 
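The lockutils messages running through this section ("acquired ... :: waited 2.988s", "released ... :: held 3.091s") come from a named-lock wrapper that times both how long a caller waited for the lock and how long it held it. Below is a stdlib-only sketch of that bookkeeping pattern; it is not oslo.concurrency's implementation, and the lock registry and message format are simplified.

```python
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}   # simplified registry of named locks

@contextmanager
def timed_lock(name: str, caller: str):
    """Acquire a named lock while reporting wait and hold durations,
    in the style of the 'waited Ns' / 'held Ns' lines in this log."""
    lock = _locks.setdefault(name, threading.Lock())
    t_wait = time.monotonic()
    with lock:
        waited = time.monotonic() - t_wait
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t_hold = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t_hold
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

# Usage: with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...
```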
[ 2090.732609] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef7e6f-79cf-4cce-921d-c23138738d37 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.740049] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaaa608-d3fd-43f7-9027-a249638501cf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.770260] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2403e9fe-f95a-4d4d-9b76-3f9ba5de52a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.777574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28fa6a8-7b91-4eab-a47a-7d94e94cbc87 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.790377] env[62405]: DEBUG nova.compute.provider_tree [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2090.828414] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.828688] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 2090.871322] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2091.020907] env[62405]: DEBUG nova.network.neutron [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Updating instance_info_cache with network_info: [{"id": "6abc3e61-4638-4911-b589-f37ab143d2b1", "address": "fa:16:3e:aa:72:10", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abc3e61-46", "ovs_interfaceid": "6abc3e61-4638-4911-b589-f37ab143d2b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.166642] env[62405]: DEBUG oslo_concurrency.lockutils [None req-0c8df575-118e-41f5-acec-f6939323b4a9 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.693s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.293997] env[62405]: DEBUG nova.scheduler.client.report [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2091.330558] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2091.524079] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.524388] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance network_info: |[{"id": "6abc3e61-4638-4911-b589-f37ab143d2b1", "address": "fa:16:3e:aa:72:10", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abc3e61-46", "ovs_interfaceid": "6abc3e61-4638-4911-b589-f37ab143d2b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2091.524822] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:72:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6abc3e61-4638-4911-b589-f37ab143d2b1', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2091.532513] env[62405]: DEBUG oslo.service.loopingcall [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2091.532731] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2091.532960] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1389e719-5751-4089-9af6-8e177b44c617 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.551958] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2091.551958] env[62405]: value = "task-1948242" [ 2091.551958] env[62405]: _type = "Task" [ 2091.551958] env[62405]: } to complete. 
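The network_info list logged just above for instance 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 is the structure Nova caches per instance: one entry per VIF, each carrying the port id, MAC address, and the subnets with their fixed IPs. The sketch below pulls the usual fields out of such a structure, assuming the JSON shape shown in the log; it is plain dict traversal for illustration, not nova.network.model.NetworkInfo.

```python
def summarize_vifs(network_info):
    """Flatten a cached network_info list (as printed in the log) into
    per-VIF summaries of port id, MAC, fixed IPs, and MTU."""
    summary = []
    for vif in network_info:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        summary.append({
            'port_id': vif['id'],
            'mac': vif['address'],
            'ips': ips,
            'mtu': vif['network']['meta'].get('mtu'),
            'ovs_interfaceid': vif.get('ovs_interfaceid'),
        })
    return summary

# With the entry shown in the log this yields:
# [{'port_id': '6abc3e61-4638-4911-b589-f37ab143d2b1', 'mac': 'fa:16:3e:aa:72:10',
#   'ips': ['192.168.128.12'], 'mtu': 8950, 'ovs_interfaceid': '6abc3e61-...'}]
```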
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.559186] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948242, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.571421] env[62405]: DEBUG nova.compute.manager [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Received event network-changed-6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2091.571561] env[62405]: DEBUG nova.compute.manager [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Refreshing instance network info cache due to event network-changed-6abc3e61-4638-4911-b589-f37ab143d2b1. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2091.571778] env[62405]: DEBUG oslo_concurrency.lockutils [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] Acquiring lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.571922] env[62405]: DEBUG oslo_concurrency.lockutils [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] Acquired lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.572099] env[62405]: DEBUG nova.network.neutron [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Refreshing network info cache for port 6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2091.647323] env[62405]: DEBUG nova.compute.manager [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-changed-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2091.647527] env[62405]: DEBUG nova.compute.manager [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing instance network info cache due to event network-changed-277de975-3957-41da-9e47-47a0be7e666f. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2091.647736] env[62405]: DEBUG oslo_concurrency.lockutils [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.647881] env[62405]: DEBUG oslo_concurrency.lockutils [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.648106] env[62405]: DEBUG nova.network.neutron [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing network info cache for port 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2091.694936] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.695169] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.798808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.821525] env[62405]: INFO nova.scheduler.client.report [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Deleted allocations for instance 34f4f278-bd4d-43f9-af83-adb48cfb0adc [ 2092.079074] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948242, 'name': CreateVM_Task, 'duration_secs': 0.380299} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.079074] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2092.079074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.079074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.079074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2092.080147] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beac7119-de38-4d39-8c8a-6402f154aa70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.085020] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2092.085020] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52de38ef-01ed-1a83-7d53-19921ba0c20d" [ 2092.085020] env[62405]: _type = "Task" [ 2092.085020] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.093849] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52de38ef-01ed-1a83-7d53-19921ba0c20d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.198373] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2092.322687] env[62405]: DEBUG nova.network.neutron [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Updated VIF entry in instance network info cache for port 6abc3e61-4638-4911-b589-f37ab143d2b1. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2092.324987] env[62405]: DEBUG nova.network.neutron [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Updating instance_info_cache with network_info: [{"id": "6abc3e61-4638-4911-b589-f37ab143d2b1", "address": "fa:16:3e:aa:72:10", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6abc3e61-46", "ovs_interfaceid": "6abc3e61-4638-4911-b589-f37ab143d2b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.331702] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fe1123ab-0e01-4cc9-9eb8-63604a9317c1 tempest-ServerShowV257Test-91255361 tempest-ServerShowV257Test-91255361-project-member] Lock "34f4f278-bd4d-43f9-af83-adb48cfb0adc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.411s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.494123] env[62405]: DEBUG nova.network.neutron [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updated VIF entry in instance network info cache for port 277de975-3957-41da-9e47-47a0be7e666f. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2092.494551] env[62405]: DEBUG nova.network.neutron [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.595291] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52de38ef-01ed-1a83-7d53-19921ba0c20d, 'name': SearchDatastore_Task, 'duration_secs': 0.011162} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.595610] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.595850] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2092.596098] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.596253] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.596436] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.596697] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab5b5a5f-f7b9-49b3-aec3-b69a22d1e438 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.604746] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.604929] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2092.605646] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c97b3ed-634f-4be1-ac86-72aae3eb60be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.610822] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2092.610822] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c0e901-c69a-1be1-6054-afb2fa5416e7" [ 2092.610822] env[62405]: _type = "Task" [ 2092.610822] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.617828] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c0e901-c69a-1be1-6054-afb2fa5416e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.831673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.832039] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.834372] env[62405]: INFO nova.compute.claims [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2092.838194] env[62405]: DEBUG oslo_concurrency.lockutils [req-21fb50b2-1656-4901-ae62-48b626e49afe req-4bbaa702-1ef4-4c3a-bf00-0314a2042c75 service nova] Releasing lock "refresh_cache-9c30bac3-d4f0-4779-9f6e-bc83bb84b001" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.997182] env[62405]: DEBUG oslo_concurrency.lockutils [req-1e4c9d9e-7d62-451e-a7df-28632b52513b req-9846d5a1-9bf4-4ffe-bd6a-ebc6ea4dee56 service nova] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.121322] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c0e901-c69a-1be1-6054-afb2fa5416e7, 'name': SearchDatastore_Task, 'duration_secs': 0.007815} completed 
successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.122116] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7de28b5f-db2f-46ee-a202-c809f7e54f3a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.127403] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2093.127403] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528c1c4b-cbbd-47a7-68ba-bf956b67f444" [ 2093.127403] env[62405]: _type = "Task" [ 2093.127403] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.136461] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528c1c4b-cbbd-47a7-68ba-bf956b67f444, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.638181] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528c1c4b-cbbd-47a7-68ba-bf956b67f444, 'name': SearchDatastore_Task, 'duration_secs': 0.045897} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.638460] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.638706] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2093.638950] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ce7d242-d348-4dcb-982b-cfd51d268202 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.645577] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2093.645577] env[62405]: value = "task-1948243" [ 2093.645577] env[62405]: _type = "Task" [ 2093.645577] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.653139] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.974927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e5a98b-03c1-44b3-bb9a-59866413e677 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.982327] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0d8b3f-7032-45eb-9871-553b389e211f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.011140] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c74dd3f-47b6-4f1a-a4b6-0592cc52a331 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.018582] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb209258-61d2-4c34-8b8f-865d932cfd5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.032428] env[62405]: DEBUG nova.compute.provider_tree [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2094.116921] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.117153] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.155813] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948243, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.535519] env[62405]: DEBUG nova.scheduler.client.report [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2094.621035] env[62405]: INFO nova.compute.manager [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Detaching volume caee8648-7be2-4e64-811e-8bad831e1865 [ 2094.651763] env[62405]: INFO nova.virt.block_device [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Attempting to driver detach volume caee8648-7be2-4e64-811e-8bad831e1865 from mountpoint /dev/sdb [ 2094.652052] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2094.652299] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401585', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'name': 'volume-caee8648-7be2-4e64-811e-8bad831e1865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '81d9be97-9147-4754-80c2-68c1a389842e', 'attached_at': '', 'detached_at': '', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'serial': 'caee8648-7be2-4e64-811e-8bad831e1865'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2094.653087] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5481bfc0-3a3e-4d07-b388-c9bd2fc2b6a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.661456] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795765} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.676761] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2094.677016] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2094.677341] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f65f81a-82f8-454c-856a-f9e1ce7b7a1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.679777] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03cc557-ea03-4e98-8f57-3dcfe63e8c01 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.688438] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fcb862-d7de-474d-a47e-f47c50b64014 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.690783] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2094.690783] env[62405]: value = "task-1948244" [ 2094.690783] env[62405]: _type = "Task" [ 2094.690783] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.710720] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585b838c-5271-47a1-b746-4383fb2199ea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.715999] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948244, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.730012] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] The volume has not been displaced from its original location: [datastore1] volume-caee8648-7be2-4e64-811e-8bad831e1865/volume-caee8648-7be2-4e64-811e-8bad831e1865.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2094.735340] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2094.735659] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66b4f1cc-0ae1-4de7-a9ad-1fe6995039d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.753923] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2094.753923] env[62405]: value = "task-1948245" [ 2094.753923] env[62405]: _type = "Task" [ 2094.753923] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.761502] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948245, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.041062] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.209s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.041282] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2095.199884] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069174} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.200293] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2095.200961] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aee82c9-0753-4009-9338-3fc4ae2d0485 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.221799] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2095.222039] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf696290-222b-4947-a88c-a4fcae660957 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.240781] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2095.240781] env[62405]: value = "task-1948246" [ 2095.240781] env[62405]: _type = "Task" [ 2095.240781] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.247908] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948246, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.261990] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948245, 'name': ReconfigVM_Task, 'duration_secs': 0.22749} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.262251] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2095.266643] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc43f525-8109-432a-8561-d574fa3c24c0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.281171] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2095.281171] env[62405]: value = "task-1948247" [ 2095.281171] env[62405]: _type = "Task" [ 2095.281171] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.288689] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948247, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.545995] env[62405]: DEBUG nova.compute.utils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2095.547528] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2095.547707] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2095.585124] env[62405]: DEBUG nova.policy [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d60f47dfb7e4334b9b7ceb5d3c6aaab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cfe90f16b140018a5802c02f751d9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2095.751225] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948246, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.790183] env[62405]: DEBUG oslo_vmware.api [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948247, 'name': ReconfigVM_Task, 'duration_secs': 0.174333} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.790518] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401585', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'name': 'volume-caee8648-7be2-4e64-811e-8bad831e1865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '81d9be97-9147-4754-80c2-68c1a389842e', 'attached_at': '', 'detached_at': '', 'volume_id': 'caee8648-7be2-4e64-811e-8bad831e1865', 'serial': 'caee8648-7be2-4e64-811e-8bad831e1865'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2095.879215] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Successfully created port: 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2096.051461] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2096.251343] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948246, 'name': ReconfigVM_Task, 'duration_secs': 0.946977} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.251705] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.252284] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-673d5e1b-87a3-45ce-a950-4bfeb124e890 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.258814] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2096.258814] env[62405]: value = "task-1948248" [ 2096.258814] env[62405]: _type = "Task" [ 2096.258814] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.268261] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948248, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.336574] env[62405]: DEBUG nova.objects.instance [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'flavor' on Instance uuid 81d9be97-9147-4754-80c2-68c1a389842e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.768599] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948248, 'name': Rename_Task, 'duration_secs': 0.151747} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.768889] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2096.769153] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-907eecec-38c8-4455-b184-28ee5f309673 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.775296] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2096.775296] env[62405]: value = "task-1948249" [ 2096.775296] env[62405]: _type = "Task" [ 2096.775296] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.782572] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.062310] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2097.089757] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2097.090007] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2097.090248] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2097.090452] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2097.090608] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2097.090754] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2097.090961] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2097.091142] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 2097.091315] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2097.091478] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2097.091650] env[62405]: DEBUG nova.virt.hardware [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2097.092557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccb4311-91ec-4c1f-aeec-61da68248c78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.100721] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d2f2eb-c072-49b4-8ede-fa3afbc3f6cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.263128] env[62405]: DEBUG nova.compute.manager [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2097.263391] env[62405]: DEBUG oslo_concurrency.lockutils [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2097.263578] env[62405]: DEBUG oslo_concurrency.lockutils [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.263741] env[62405]: DEBUG oslo_concurrency.lockutils [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.263909] env[62405]: DEBUG nova.compute.manager [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] No waiting events found dispatching network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 2097.264088] env[62405]: WARNING nova.compute.manager [req-b31860db-c210-43f3-b881-a17d04ae5be7 req-d84871a6-c36c-4fff-a3a7-b7294faca10c service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received unexpected event network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f for instance with vm_state building and task_state spawning. [ 2097.286644] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948249, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.345362] env[62405]: DEBUG oslo_concurrency.lockutils [None req-75e8cea4-16db-47e6-865d-e7891df9ad68 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.357440] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Successfully updated port: 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2097.786435] env[62405]: DEBUG oslo_vmware.api [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948249, 'name': PowerOnVM_Task, 'duration_secs': 0.706503} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.786735] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2097.786941] env[62405]: INFO nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Took 8.43 seconds to spawn the instance on the hypervisor. 
[ 2097.787165] env[62405]: DEBUG nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2097.787938] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274d4a6a-2a8a-45dd-b576-058d4ee573d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.859647] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2097.859803] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2097.859955] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2098.304741] env[62405]: INFO nova.compute.manager [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Took 14.07 seconds to build instance. [ 2098.390029] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2098.426881] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.427167] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.427387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "81d9be97-9147-4754-80c2-68c1a389842e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.427574] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.427744] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.429691] env[62405]: INFO nova.compute.manager [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Terminating instance [ 2098.516826] env[62405]: DEBUG nova.network.neutron [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2098.735912] env[62405]: DEBUG oslo_concurrency.lockutils [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.806535] env[62405]: DEBUG oslo_concurrency.lockutils [None req-ce90a30a-6130-4d90-9147-031c973ee2a7 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.582s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.806784] env[62405]: DEBUG oslo_concurrency.lockutils [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.071s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.806968] env[62405]: DEBUG nova.compute.manager [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2098.807885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99dcfb3-d5fe-4156-8677-e3fe2d38d186 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.814838] env[62405]: DEBUG nova.compute.manager [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2098.815422] env[62405]: DEBUG nova.objects.instance [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'flavor' on Instance uuid 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 {{(pid=62405) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2098.933954] env[62405]: DEBUG nova.compute.manager [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2098.934179] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2098.935059] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c23226-af62-43e2-a96c-f2745ca3da73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.942936] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2098.943181] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f30512c-351c-4f5f-bcf5-5580a7d75ba7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.949198] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2098.949198] env[62405]: value = "task-1948250" [ 2098.949198] env[62405]: _type = "Task" [ 2098.949198] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.956859] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948250, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.019221] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.019571] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance network_info: |[{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2099.020015] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:07:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ccf45be-5a2c-4a79-862c-d1b26508863f', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2099.027708] env[62405]: DEBUG oslo.service.loopingcall [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2099.027936] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2099.028186] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3851bc4b-45d7-472e-9b30-a94cdbade9a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.048025] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2099.048025] env[62405]: value = "task-1948251" [ 2099.048025] env[62405]: _type = "Task" [ 2099.048025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.055980] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948251, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.288936] env[62405]: DEBUG nova.compute.manager [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2099.289311] env[62405]: DEBUG nova.compute.manager [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing instance network info cache due to event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2099.289689] env[62405]: DEBUG oslo_concurrency.lockutils [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.289981] env[62405]: DEBUG oslo_concurrency.lockutils [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.290336] env[62405]: DEBUG nova.network.neutron [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2099.459355] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948250, 'name': PowerOffVM_Task, 'duration_secs': 0.209396} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.459754] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2099.459829] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2099.460063] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4c89fc4-8dab-49a0-add4-495881f1bf1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.560015] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948251, 'name': CreateVM_Task, 'duration_secs': 0.45612} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.560015] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2099.560625] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.560852] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.561122] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2099.561371] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63eeb54c-c760-4e05-a6c4-4f202f919e58 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.565496] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2099.565496] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0cfa1-ef9d-24ec-9968-4b27d1c51d3a" [ 2099.565496] env[62405]: _type = "Task" [ 2099.565496] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.572667] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0cfa1-ef9d-24ec-9968-4b27d1c51d3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.710874] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2099.711084] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2099.711275] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleting the datastore file [datastore1] 81d9be97-9147-4754-80c2-68c1a389842e {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2099.711549] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ace137f-3289-49d7-9133-08a4fdc7b09e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.717498] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2099.717498] env[62405]: value = "task-1948253" [ 2099.717498] env[62405]: _type = "Task" [ 2099.717498] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.725240] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948253, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.822817] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2099.823172] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-545b0f65-0c41-4478-bfb5-9073e53f7a70 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.830773] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2099.830773] env[62405]: value = "task-1948254" [ 2099.830773] env[62405]: _type = "Task" [ 2099.830773] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.840147] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.016535] env[62405]: DEBUG nova.network.neutron [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updated VIF entry in instance network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2100.016946] env[62405]: DEBUG nova.network.neutron [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.075534] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52d0cfa1-ef9d-24ec-9968-4b27d1c51d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009258} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.075841] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.076087] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2100.076328] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2100.076501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2100.076686] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2100.076938] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbbd0f5e-196d-4467-bed9-5befe769e646 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.227269] env[62405]: DEBUG oslo_vmware.api [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220635} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.227563] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2100.227756] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2100.227937] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2100.228213] env[62405]: INFO nova.compute.manager [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Took 1.29 seconds to destroy the instance on the hypervisor. [ 2100.228481] env[62405]: DEBUG oslo.service.loopingcall [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2100.228675] env[62405]: DEBUG nova.compute.manager [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2100.228769] env[62405]: DEBUG nova.network.neutron [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2100.340836] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.520243] env[62405]: DEBUG oslo_concurrency.lockutils [req-35f140c2-420a-428a-aa51-1a03befcf55c req-ac53302d-4c72-4153-9e1e-3c84ed737667 service nova] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2100.841983] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.211387] env[62405]: DEBUG nova.network.neutron [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.316251] env[62405]: DEBUG nova.compute.manager [req-96e603f4-c6b5-45a7-8f28-185561069f5c req-14ee0931-9d7a-4563-a87e-408407f51161 service nova] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Received event network-vif-deleted-2ba16494-2db9-4083-9a27-d4f12dac6ba1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2101.342322] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.575791] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2101.576157] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2101.576751] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a5599c1-117f-40a3-8f4d-f63e02f55190 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.582358] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2101.582358] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a80b92-bafa-bd7b-9433-fdc674af69a1" [ 2101.582358] env[62405]: _type = "Task" [ 2101.582358] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.590212] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a80b92-bafa-bd7b-9433-fdc674af69a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.714031] env[62405]: INFO nova.compute.manager [-] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Took 1.49 seconds to deallocate network for instance. [ 2101.842788] env[62405]: DEBUG oslo_vmware.api [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948254, 'name': PowerOffVM_Task, 'duration_secs': 1.810932} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.843072] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2101.843284] env[62405]: DEBUG nova.compute.manager [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2101.844039] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ab01fe-b7d2-4bee-bd3b-65a6fafe16c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.093074] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a80b92-bafa-bd7b-9433-fdc674af69a1, 'name': SearchDatastore_Task, 'duration_secs': 0.014222} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.093872] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09888b45-2ff7-406f-b0d8-1dc04e5099c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.099216] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2102.099216] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52000e56-452e-c607-168b-0dd8ed4ab267" [ 2102.099216] env[62405]: _type = "Task" [ 2102.099216] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.106635] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52000e56-452e-c607-168b-0dd8ed4ab267, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.220610] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.220890] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.221133] env[62405]: DEBUG nova.objects.instance [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'resources' on Instance uuid 81d9be97-9147-4754-80c2-68c1a389842e {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2102.356485] env[62405]: DEBUG oslo_concurrency.lockutils [None req-967b670a-267d-4fee-9419-d92658c98941 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.549s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.609450] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52000e56-452e-c607-168b-0dd8ed4ab267, 'name': SearchDatastore_Task, 'duration_secs': 0.009517} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.609838] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2102.609953] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2102.610203] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cdf19e5-ea61-480c-a213-65f52a957ef9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.616958] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2102.616958] env[62405]: value = "task-1948255" [ 2102.616958] env[62405]: _type = "Task" [ 2102.616958] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2102.623980] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948255, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2102.885113] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7b76dd-adff-4ff5-a789-ff06889b449d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.893813] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa70d4ee-a037-48c3-97af-905f9d1ee4e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.927351] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a73bf7-38c2-4f85-b5d6-5e83f14b99e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.937417] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b323efd-cf46-4161-ac95-b7a5ff45d132 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.958262] env[62405]: DEBUG nova.compute.provider_tree [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.126299] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948255, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.447353] env[62405]: INFO nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Rebuilding instance [ 2103.481217] env[62405]: ERROR nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [req-da72c582-1385-44ed-a1ad-f0964941d7f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-da72c582-1385-44ed-a1ad-f0964941d7f5"}]} [ 2103.490505] env[62405]: DEBUG nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2103.491525] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e7379e-5f91-4ee2-9edc-1a418461996a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.496220] env[62405]: DEBUG nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2103.509945] env[62405]: DEBUG nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2103.510112] env[62405]: DEBUG nova.compute.provider_tree [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.521244] env[62405]: DEBUG nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2103.538968] env[62405]: DEBUG nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, 
traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2103.626837] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529349} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2103.628979] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2103.629216] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2103.629617] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcf5951d-fa29-4efd-af53-c17f515fda89 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.635302] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2103.635302] env[62405]: value = "task-1948256" [ 2103.635302] env[62405]: _type = "Task" [ 2103.635302] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.644663] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948256, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.659301] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d08e1f-3f8d-419e-a6b7-c3fd2bda1e8c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.666691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d5343f-28ab-4543-9fd6-cb231adfb179 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.697326] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3bbe46-0f01-4637-b286-ac3250deb7a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.704209] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000bc441-891c-4b9b-bd3a-e6b0aa8ad117 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.717231] env[62405]: DEBUG nova.compute.provider_tree [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2104.144963] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059139} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.145552] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2104.146305] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4e8110-b408-48dc-b618-713b36871380 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.167458] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2104.167699] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51617c2c-48df-4815-a3a1-3f131ecdd80d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.186455] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2104.186455] env[62405]: value = "task-1948257" [ 2104.186455] env[62405]: _type = "Task" [ 2104.186455] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.194082] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948257, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.246491] env[62405]: DEBUG nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 182 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2104.246804] env[62405]: DEBUG nova.compute.provider_tree [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 182 to 183 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2104.247014] env[62405]: DEBUG nova.compute.provider_tree [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2104.303742] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.303977] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.510768] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2104.511137] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-861025b9-185a-4e58-b181-9d560bf894be {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.518840] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2104.518840] env[62405]: value = "task-1948258" [ 2104.518840] env[62405]: _type = "Task" [ 2104.518840] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.526627] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.696104] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948257, 'name': ReconfigVM_Task, 'duration_secs': 0.362958} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.696439] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2104.697025] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94abb4dd-32db-4eaf-89e2-86d6fdcb55d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.703203] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2104.703203] env[62405]: value = "task-1948259" [ 2104.703203] env[62405]: _type = "Task" [ 2104.703203] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.711882] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948259, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.751960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.531s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.769712] env[62405]: INFO nova.scheduler.client.report [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted allocations for instance 81d9be97-9147-4754-80c2-68c1a389842e [ 2104.807369] env[62405]: DEBUG nova.compute.utils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2105.030062] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2105.030062] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2105.030062] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a8e5d2-0b37-4e1b-912e-cd1dc585eb32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.036062] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2105.036283] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-815d0b0e-85a1-42ef-a9ff-cddb1c87e3a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.212764] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948259, 'name': Rename_Task, 'duration_secs': 0.146746} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.213061] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2105.213301] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e65151c4-f2c4-4531-ab0a-ac91ff8f0671 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.220423] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2105.220423] env[62405]: value = "task-1948261" [ 2105.220423] env[62405]: _type = "Task" [ 2105.220423] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.227843] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.278187] env[62405]: DEBUG oslo_concurrency.lockutils [None req-05449304-f9e7-4173-a278-8b1fdb214fcf tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "81d9be97-9147-4754-80c2-68c1a389842e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.851s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.310633] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.730863] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948261, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.231185] env[62405]: DEBUG oslo_vmware.api [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948261, 'name': PowerOnVM_Task, 'duration_secs': 0.602204} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.231549] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2106.231763] env[62405]: INFO nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Took 9.17 seconds to spawn the instance on the hypervisor. [ 2106.231945] env[62405]: DEBUG nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2106.232710] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c09155-35f9-4683-8b87-bc6f9d42eece {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.377673] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.377958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.378221] env[62405]: INFO nova.compute.manager [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attaching volume cd1772e2-3baa-4a6d-ad99-752be5e16145 to /dev/sdb [ 2106.408129] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb69b36e-d97c-486c-a5ba-e04aecc1393b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.415461] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0372f58f-4ca9-4efe-814f-668483dfc99c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.429324] env[62405]: DEBUG nova.virt.block_device [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating existing volume attachment record: a70fa1f5-d78e-4780-adef-bbd51af51b6a {{(pid=62405) _volume_attach 
/opt/stack/nova/nova/virt/block_device.py:666}} [ 2106.748500] env[62405]: INFO nova.compute.manager [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Took 14.04 seconds to build instance. [ 2107.250676] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c82a91d4-71eb-4d98-a147-04d47aa3a920 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.555s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2107.467056] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2107.467374] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2107.467578] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2107.467870] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba3b7e8e-0962-4bd4-9fc6-d45980cde514 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.474822] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2107.474822] env[62405]: value = "task-1948264" [ 2107.474822] env[62405]: _type = "Task" [ 2107.474822] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.483227] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948264, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.634763] env[62405]: DEBUG nova.compute.manager [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2107.634967] env[62405]: DEBUG nova.compute.manager [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing instance network info cache due to event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2107.635203] env[62405]: DEBUG oslo_concurrency.lockutils [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2107.635356] env[62405]: DEBUG oslo_concurrency.lockutils [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2107.635517] env[62405]: DEBUG nova.network.neutron [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2107.984946] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.074499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.074758] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.365713] env[62405]: DEBUG nova.network.neutron [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updated VIF entry in instance network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2108.367218] env[62405]: DEBUG nova.network.neutron [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2108.486107] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.570867} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.486437] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2108.486723] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2108.486943] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2108.578504] env[62405]: DEBUG nova.compute.utils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2108.869081] env[62405]: DEBUG oslo_concurrency.lockutils [req-82a36a22-d33c-499c-8e2c-06989aaa44f7 req-427f8146-a8ca-49a9-a467-9302325b423a service nova] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2109.084456] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.523267] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2109.523522] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor 
limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2109.523683] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2109.523867] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2109.524026] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2109.524186] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2109.524474] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2109.524577] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2109.524732] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2109.524898] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2109.525091] env[62405]: DEBUG nova.virt.hardware [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2109.525960] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b823e83e-3c34-4d80-947c-3e0d07be7718 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.534351] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-734be9be-5e90-4531-83d0-7c2ca1f85893 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.547638] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:72:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6abc3e61-4638-4911-b589-f37ab143d2b1', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2109.555095] env[62405]: DEBUG oslo.service.loopingcall [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2109.555356] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2109.555570] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47b8b6d7-8385-4f62-8a0b-011307a04def {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.574729] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2109.574729] env[62405]: value = "task-1948266" [ 2109.574729] env[62405]: _type = "Task" [ 2109.574729] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.583923] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948266, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.085576] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948266, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.147562] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.147843] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.148115] env[62405]: INFO nova.compute.manager [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Attaching volume becfbecd-8a34-4ed5-b62e-25e975b48a2f to /dev/sdb [ 2110.177533] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcc833c-b23c-460f-b6cf-ec7de47cb8c9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.184813] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f2d238-6251-4b91-8583-052502438485 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.198742] env[62405]: DEBUG nova.virt.block_device [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating existing volume attachment record: 8a16bf08-f754-4b67-aad1-67c91c545d1f {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2110.585994] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948266, 'name': CreateVM_Task, 'duration_secs': 0.787793} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.586169] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2110.586780] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2110.586954] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.587309] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2110.587600] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3b9e04c-5163-45cd-847e-0478172a293e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.592356] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2110.592356] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527358dc-ff76-806c-b41d-62fd06dfc978" [ 2110.592356] env[62405]: _type = "Task" [ 2110.592356] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.600035] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527358dc-ff76-806c-b41d-62fd06dfc978, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.710023] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "0d2b305d-d754-413c-afdf-3a2e8f143891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.710380] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.710617] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.710809] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.710998] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.713180] env[62405]: INFO nova.compute.manager [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Terminating instance [ 2110.972445] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2110.972725] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401600', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'name': 'volume-cd1772e2-3baa-4a6d-ad99-752be5e16145', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'serial': 'cd1772e2-3baa-4a6d-ad99-752be5e16145'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2110.973832] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d8734c-4636-4881-ad47-b5b21522f7e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.990133] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabbbb85-3372-48bd-90bd-22f548f6f377 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.015761] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-cd1772e2-3baa-4a6d-ad99-752be5e16145/volume-cd1772e2-3baa-4a6d-ad99-752be5e16145.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2111.016051] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73f7b973-56e3-4ca0-8afe-91e9b2f82f57 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.033359] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2111.033359] env[62405]: value = "task-1948268" [ 2111.033359] env[62405]: _type = "Task" [ 2111.033359] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.041470] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948268, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.101614] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527358dc-ff76-806c-b41d-62fd06dfc978, 'name': SearchDatastore_Task, 'duration_secs': 0.009155} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.101988] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2111.102179] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2111.102409] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2111.102546] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.102719] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2111.102969] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03a7c2a3-67a1-4918-b3a2-1d0ad989865a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.111365] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2111.111504] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2111.112180] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bfbcfe0-038c-4305-badf-36aa2f11a701 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.117172] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2111.117172] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd87cc-489a-adad-57cf-e74809e47937" [ 2111.117172] env[62405]: _type = "Task" [ 2111.117172] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.124514] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd87cc-489a-adad-57cf-e74809e47937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.216493] env[62405]: DEBUG nova.compute.manager [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2111.216745] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2111.217615] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b21d61-1875-4a77-ad40-56001da5cc7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.225283] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2111.225540] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34690557-87a7-4f50-b642-f52fc27cbcfb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.232016] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2111.232016] env[62405]: value = "task-1948269" [ 2111.232016] env[62405]: _type = "Task" [ 2111.232016] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.239836] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948269, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.542741] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948268, 'name': ReconfigVM_Task, 'duration_secs': 0.386683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.543040] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-cd1772e2-3baa-4a6d-ad99-752be5e16145/volume-cd1772e2-3baa-4a6d-ad99-752be5e16145.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2111.547591] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-040bfcce-8d9c-4c52-b080-7e915c131cba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.562619] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2111.562619] env[62405]: value = "task-1948270" [ 2111.562619] env[62405]: _type = "Task" [ 2111.562619] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.570128] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.627856] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fd87cc-489a-adad-57cf-e74809e47937, 'name': SearchDatastore_Task, 'duration_secs': 0.008499} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.628639] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc90b1ec-cd0c-4479-94f5-c9b99669c78f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.633457] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2111.633457] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c7a49-1e9b-f72e-5f6e-0c5df3cc1c84" [ 2111.633457] env[62405]: _type = "Task" [ 2111.633457] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.640957] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c7a49-1e9b-f72e-5f6e-0c5df3cc1c84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.742078] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948269, 'name': PowerOffVM_Task, 'duration_secs': 0.191953} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.742354] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2111.742536] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2111.742783] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52aa5254-8a10-4cdb-8e6b-bb46546f795e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.849014] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2111.849235] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2111.849453] 
env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleting the datastore file [datastore1] 0d2b305d-d754-413c-afdf-3a2e8f143891 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2111.849740] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38338c75-d907-4cab-8039-578126dd12d2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.857147] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2111.857147] env[62405]: value = "task-1948272" [ 2111.857147] env[62405]: _type = "Task" [ 2111.857147] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.864592] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948272, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.072114] env[62405]: DEBUG oslo_vmware.api [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948270, 'name': ReconfigVM_Task, 'duration_secs': 0.172737} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.072429] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401600', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'name': 'volume-cd1772e2-3baa-4a6d-ad99-752be5e16145', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'serial': 'cd1772e2-3baa-4a6d-ad99-752be5e16145'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2112.143825] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]520c7a49-1e9b-f72e-5f6e-0c5df3cc1c84, 'name': SearchDatastore_Task, 'duration_secs': 0.012885} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.144180] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.144354] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2112.144610] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78b3a7f5-6bcb-4184-8c49-ced8c005eac0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.150786] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2112.150786] env[62405]: value = "task-1948273" [ 2112.150786] env[62405]: _type = "Task" [ 2112.150786] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.157978] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.368463] env[62405]: DEBUG oslo_vmware.api [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948272, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26367} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.368740] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2112.368925] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2112.369217] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2112.369288] env[62405]: INFO nova.compute.manager [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2112.370024] env[62405]: DEBUG oslo.service.loopingcall [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2112.370024] env[62405]: DEBUG nova.compute.manager [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2112.370024] env[62405]: DEBUG nova.network.neutron [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2112.661913] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486585} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2112.662206] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2112.662439] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2112.662686] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46f89730-81f8-486a-9f43-ad0ce48a8396 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.669377] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2112.669377] env[62405]: value = "task-1948275" [ 2112.669377] env[62405]: _type = "Task" [ 2112.669377] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.677447] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948275, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.751526] env[62405]: DEBUG nova.compute.manager [req-29678259-db41-4e68-a07e-1a0ab0262b06 req-9bf2b8a8-cbb5-49c0-bb93-636ef68399eb service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Received event network-vif-deleted-b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2112.751753] env[62405]: INFO nova.compute.manager [req-29678259-db41-4e68-a07e-1a0ab0262b06 req-9bf2b8a8-cbb5-49c0-bb93-636ef68399eb service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Neutron deleted interface b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961; detaching it from the instance and deleting it from the info cache [ 2112.751900] env[62405]: DEBUG nova.network.neutron [req-29678259-db41-4e68-a07e-1a0ab0262b06 req-9bf2b8a8-cbb5-49c0-bb93-636ef68399eb service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.020015] env[62405]: INFO nova.compute.manager [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Rebuilding instance [ 2113.074834] env[62405]: DEBUG nova.compute.manager [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2113.077184] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8cdba3-cc39-479b-9305-6769ee35477b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.118458] env[62405]: DEBUG nova.objects.instance [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2113.178350] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057865} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.178632] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2113.179355] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53f3ff7-3e13-4a7b-b2d8-b2615ffabae3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.200313] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2113.200816] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ad9708c-e285-4093-9c64-3bb9d7b25bb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.219696] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2113.219696] env[62405]: value = "task-1948276" [ 2113.219696] env[62405]: _type = "Task" [ 2113.219696] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.228811] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948276, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.231211] env[62405]: DEBUG nova.network.neutron [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.254473] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d78cdfb-1d13-4bb0-affc-4d3cc9490dae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.263897] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357fe2ae-f2b2-4b41-81c0-c8ad27fe2f76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.296361] env[62405]: DEBUG nova.compute.manager [req-29678259-db41-4e68-a07e-1a0ab0262b06 req-9bf2b8a8-cbb5-49c0-bb93-636ef68399eb service nova] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Detach interface failed, port_id=b23a9aa4-c4ec-442c-abdc-2c7b2d5a9961, reason: Instance 0d2b305d-d754-413c-afdf-3a2e8f143891 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2113.623763] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c046b608-6961-4099-87f3-7202df48bdd6 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.245s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.730825] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.733582] env[62405]: INFO nova.compute.manager [-] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Took 1.36 seconds to deallocate network for instance. [ 2114.089427] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2114.089815] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92b193f3-ca2c-4c5a-91d3-d52d6049f44b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.099658] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2114.099658] env[62405]: value = "task-1948277" [ 2114.099658] env[62405]: _type = "Task" [ 2114.099658] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.107436] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.230376] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948276, 'name': ReconfigVM_Task, 'duration_secs': 0.66383} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.230682] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001/9c30bac3-d4f0-4779-9f6e-bc83bb84b001.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2114.231331] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-514e47ec-8a5f-4bf6-9f6e-67f949aa0931 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.238188] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2114.238188] env[62405]: value = "task-1948278" [ 2114.238188] env[62405]: _type = "Task" [ 2114.238188] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.239152] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.239389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.239600] env[62405]: DEBUG nova.objects.instance [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'resources' on Instance uuid 0d2b305d-d754-413c-afdf-3a2e8f143891 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2114.248472] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948278, 'name': Rename_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.454361] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.454600] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.610458] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948277, 'name': PowerOffVM_Task, 'duration_secs': 0.174227} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.610675] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2114.610905] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2114.611651] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd061643-6269-47b6-815f-7341745c8c52 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.617969] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2114.618207] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f63d52fb-2764-41a1-a3a4-abe5c88d9c18 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.742483] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2114.742672] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2114.743353] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleting the datastore file [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2114.757847] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0f31cf4-c743-4137-9feb-27bb26f8a388 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.761392] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2114.761392] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401602', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'name': 'volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c74cae9-1607-4928-a927-f0c8b86f7698', 'attached_at': '', 'detached_at': '', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'serial': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2114.762150] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9295b55a-5938-4071-b569-85535cd784ff {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.775313] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948278, 'name': Rename_Task, 'duration_secs': 0.137917} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.802621] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2114.803075] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2114.803075] env[62405]: value = "task-1948280" [ 2114.803075] env[62405]: _type = "Task" [ 2114.803075] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.805962] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6e3f058-e0f6-485b-bd62-0900fd417346 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.808140] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7769d9-3796-45f7-a628-3f1a227b1c0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.818757] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.831784] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2114.831784] env[62405]: value = "task-1948281" [ 2114.831784] env[62405]: _type = "Task" [ 2114.831784] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.839088] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f/volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2114.841883] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c8cda09-043e-4598-b17a-a4c2f27cb7b5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.863737] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948281, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.866120] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2114.866120] env[62405]: value = "task-1948282" [ 2114.866120] env[62405]: _type = "Task" [ 2114.866120] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.878419] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948282, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.957893] env[62405]: DEBUG nova.compute.utils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2114.960734] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d128cbd-aadc-4e01-8ecc-61cd755dcf5e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.968923] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde40837-e349-4481-9cd3-437df2913f2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.004736] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889b4cc6-33b2-42a2-8c40-caa16aaac15d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.012447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31064fa1-138e-4223-8bf6-1153b9da75f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.026548] env[62405]: DEBUG nova.compute.provider_tree [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2115.317982] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256956} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.318332] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2115.318558] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2115.318748] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2115.350398] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948281, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.375363] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948282, 'name': ReconfigVM_Task, 'duration_secs': 0.347932} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.375603] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f/volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2115.380262] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-544b87f3-714c-4275-993c-343796e9914a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.395210] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2115.395210] env[62405]: value = "task-1948283" [ 2115.395210] env[62405]: _type = "Task" [ 2115.395210] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.403066] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948283, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.464797] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.530425] env[62405]: DEBUG nova.scheduler.client.report [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2115.851597] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948281, 'name': PowerOnVM_Task, 'duration_secs': 0.730798} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.851855] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2115.852074] env[62405]: DEBUG nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2115.852866] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97077819-4be4-482c-a096-8cae6c5d809e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.905226] env[62405]: DEBUG oslo_vmware.api [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948283, 'name': ReconfigVM_Task, 'duration_secs': 0.162468} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.905550] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401602', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'name': 'volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c74cae9-1607-4928-a927-f0c8b86f7698', 'attached_at': '', 'detached_at': '', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'serial': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2116.035190] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.060750] env[62405]: INFO nova.scheduler.client.report [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted allocations for instance 0d2b305d-d754-413c-afdf-3a2e8f143891 [ 2116.356107] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2116.356394] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2116.356523] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2116.356709] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2116.356859] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2116.357024] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2116.357244] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2116.357409] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2116.357578] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2116.357741] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2116.357912] env[62405]: DEBUG nova.virt.hardware [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2116.358803] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d68fdbb-4b3d-496b-83f4-1ba1c1924b78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.363479] env[62405]: INFO nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] bringing vm to original state: 'stopped' [ 2116.369334] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1776b4f4-8efc-4fa8-b7be-30bbfc9cee7c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.382631] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc 
tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:28:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2116.389835] env[62405]: DEBUG oslo.service.loopingcall [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2116.390093] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2116.390327] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69c0daeb-a91c-4d63-a578-daf4a8286577 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.412270] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2116.412270] env[62405]: value = "task-1948284" [ 2116.412270] env[62405]: _type = "Task" [ 2116.412270] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.419940] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948284, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.524977] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.525292] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.525573] env[62405]: INFO nova.compute.manager [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attaching volume 1f963a86-6332-49b1-ac32-d80995d45115 to /dev/sdc [ 2116.555549] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18410ab-aaf3-48ed-a99e-de24a75fc47d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.562304] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0776664a-66cc-419b-ac26-29df7571b706 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.568453] env[62405]: DEBUG oslo_concurrency.lockutils [None req-29b135bf-9e8b-4a91-bef7-c5a829755258 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "0d2b305d-d754-413c-afdf-3a2e8f143891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.858s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.575041] env[62405]: DEBUG nova.virt.block_device [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating existing volume attachment record: e89df465-f205-4956-b79a-783e75efb554 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2116.922960] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948284, 'name': CreateVM_Task, 'duration_secs': 0.360781} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.922960] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2116.923318] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.923492] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.923806] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2116.924061] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65926f01-bb9d-4f14-9bd1-7a2c251a0461 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.928387] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2116.928387] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fde190-b542-3afb-a6fb-2e1af2566f18" [ 2116.928387] env[62405]: _type = "Task" [ 2116.928387] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.935557] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fde190-b542-3afb-a6fb-2e1af2566f18, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.938775] env[62405]: DEBUG nova.objects.instance [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 7c74cae9-1607-4928-a927-f0c8b86f7698 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2117.371958] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.372246] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.372363] env[62405]: DEBUG nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2117.373535] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31529740-9007-44d1-9d5a-5051f7f90829 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.380746] env[62405]: DEBUG nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2117.438948] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fde190-b542-3afb-a6fb-2e1af2566f18, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.439286] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.439518] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2117.439746] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.439895] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.440085] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.440358] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec819a44-432d-47de-a00c-3190a652156e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.444164] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6926c9b1-e0bc-4d4c-92c3-115e1176b377 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.296s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.449933] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.450132] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2117.450871] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29bb6b5b-6042-46c9-8db4-c839005d0b31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.456305] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2117.456305] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f91da2-6c7c-04d0-acb9-ed42c851e681" [ 2117.456305] env[62405]: _type = "Task" [ 2117.456305] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.464233] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f91da2-6c7c-04d0-acb9-ed42c851e681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.470924] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "a91a6d04-2ec0-4568-bdb3-732d148644de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.471156] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.471401] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.471588] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.471756] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.473604] env[62405]: INFO nova.compute.manager [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Terminating instance [ 2117.870855] env[62405]: DEBUG oslo_concurrency.lockutils [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.871207] env[62405]: DEBUG oslo_concurrency.lockutils [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.884027] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2117.884340] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1367a65c-2525-4e20-bfa0-19cd787d05c2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.892218] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2117.892218] env[62405]: value = "task-1948286" [ 2117.892218] env[62405]: _type = "Task" [ 2117.892218] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.903034] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948286, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.969224] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f91da2-6c7c-04d0-acb9-ed42c851e681, 'name': SearchDatastore_Task, 'duration_secs': 0.008967} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.970022] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ce6ba7-9ffc-4b55-a76c-90578a1bf622 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.975909] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2117.975909] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52191ecb-9d5f-5662-f286-facc2d0449b4" [ 2117.975909] env[62405]: _type = "Task" [ 2117.975909] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.976637] env[62405]: DEBUG nova.compute.manager [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2117.976858] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2117.980557] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31097a98-f2e6-410e-b9f3-272ceb63f98b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.988405] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52191ecb-9d5f-5662-f286-facc2d0449b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.990431] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2117.990673] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38590832-ffdf-42e7-a775-ce7b376d0775 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.996763] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2117.996763] env[62405]: value = "task-1948287" [ 2117.996763] env[62405]: _type = "Task" [ 2117.996763] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.004869] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948287, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.374733] env[62405]: INFO nova.compute.manager [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Detaching volume becfbecd-8a34-4ed5-b62e-25e975b48a2f [ 2118.403716] env[62405]: DEBUG oslo_vmware.api [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948286, 'name': PowerOffVM_Task, 'duration_secs': 0.204571} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.403853] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2118.404068] env[62405]: DEBUG nova.compute.manager [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2118.404894] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df237b85-4033-4174-92da-40eb2708415b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.408065] env[62405]: INFO nova.virt.block_device [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Attempting to driver detach volume becfbecd-8a34-4ed5-b62e-25e975b48a2f from mountpoint /dev/sdb [ 2118.408336] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2118.408538] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401602', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'name': 'volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c74cae9-1607-4928-a927-f0c8b86f7698', 'attached_at': '', 'detached_at': '', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'serial': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2118.409271] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a099d0-1c61-4889-afaf-a69ebb10cacc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.433633] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a02af9f-70b3-47a2-a489-8662897b1418 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.440843] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8869c224-5543-4edb-b4ef-7edc83c9fb0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.460806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a47afb-970b-48a8-87e6-225e11f8243c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.476390] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] The volume has not been displaced from its original location: [datastore1] volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f/volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2118.481723] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2118.482346] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98695718-4ad1-4837-a373-4235f8cc3a1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.502553] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2118.502553] env[62405]: value = "task-1948288" [ 2118.502553] env[62405]: _type = "Task" [ 2118.502553] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.505862] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52191ecb-9d5f-5662-f286-facc2d0449b4, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.510943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.511222] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2118.511485] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948287, 'name': PowerOffVM_Task, 'duration_secs': 0.194605} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.511683] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6aff948-f998-472e-97d6-ef481c94cb29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.513559] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2118.513740] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2118.514472] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc4de2dc-c370-4b06-bfda-7bf7f19eee30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.518704] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.522671] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2118.522671] env[62405]: value = "task-1948289" [ 2118.522671] env[62405]: _type = "Task" [ 2118.522671] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.530451] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948289, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.704803] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2118.705491] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2118.705491] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleting the datastore file [datastore1] a91a6d04-2ec0-4568-bdb3-732d148644de {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2118.705716] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0202b759-aa41-4ac7-b80f-ee84b4f920ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.713075] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for the task: (returnval){ [ 2118.713075] env[62405]: value = "task-1948291" [ 2118.713075] env[62405]: _type = "Task" [ 2118.713075] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.723653] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.941666] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.569s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.015941] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948288, 'name': ReconfigVM_Task, 'duration_secs': 0.213948} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.016313] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2119.021411] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b34f4af-a9e4-47f3-8b72-d64467ebf0ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.040197] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948289, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416751} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.041482] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2119.041716] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2119.042025] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2119.042025] env[62405]: value = "task-1948293" [ 2119.042025] env[62405]: _type = "Task" [ 2119.042025] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.042222] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9939ac4-4ccf-47f7-905b-846da2b5f860 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.053347] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948293, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.054650] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2119.054650] env[62405]: value = "task-1948294" [ 2119.054650] env[62405]: _type = "Task" [ 2119.054650] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.062469] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.222937] env[62405]: DEBUG oslo_vmware.api [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Task: {'id': task-1948291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318776} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.223231] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2119.223422] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2119.223606] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2119.223783] env[62405]: INFO nova.compute.manager [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2119.224034] env[62405]: DEBUG oslo.service.loopingcall [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.224232] env[62405]: DEBUG nova.compute.manager [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2119.224329] env[62405]: DEBUG nova.network.neutron [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2119.447281] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.447551] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.447728] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.447943] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.448089] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.451956] env[62405]: INFO nova.compute.manager [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Terminating instance [ 2119.453067] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2119.453247] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.453408] env[62405]: DEBUG nova.objects.instance [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2119.528255] env[62405]: DEBUG nova.compute.manager [req-d130f527-e86b-4712-9fa1-d8b6a4ec4be3 req-ee65eeb5-d61c-4b20-88b5-05ccd5678984 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Received event network-vif-deleted-744277fe-5ae4-47a1-8b6e-f92b066ed2a3 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2119.528501] env[62405]: INFO nova.compute.manager [req-d130f527-e86b-4712-9fa1-d8b6a4ec4be3 req-ee65eeb5-d61c-4b20-88b5-05ccd5678984 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Neutron deleted interface 744277fe-5ae4-47a1-8b6e-f92b066ed2a3; detaching it from the instance and deleting it from the info cache [ 2119.528688] env[62405]: DEBUG nova.network.neutron [req-d130f527-e86b-4712-9fa1-d8b6a4ec4be3 req-ee65eeb5-d61c-4b20-88b5-05ccd5678984 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.554056] env[62405]: DEBUG oslo_vmware.api [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948293, 'name': ReconfigVM_Task, 'duration_secs': 0.199397} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.554380] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401602', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'name': 'volume-becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7c74cae9-1607-4928-a927-f0c8b86f7698', 'attached_at': '', 'detached_at': '', 'volume_id': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f', 'serial': 'becfbecd-8a34-4ed5-b62e-25e975b48a2f'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2119.564392] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061314} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.564647] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2119.565390] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7ac24c-b900-44b3-a1b8-536292cce493 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.587639] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2119.587889] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4d9ddb1-05cc-4f9f-a603-761b27655b87 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.608897] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2119.608897] env[62405]: value = "task-1948295" [ 2119.608897] env[62405]: _type = "Task" [ 2119.608897] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.616500] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948295, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.961700] env[62405]: DEBUG nova.compute.manager [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2119.961700] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2119.962639] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8e6800-5829-49d2-824a-1c3942eab699 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.970547] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2119.970759] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffceda33-d942-489e-bc1d-265c8008341f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.008756] env[62405]: DEBUG nova.network.neutron [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.030788] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d29a81de-2a6b-41e6-b490-5d9ba1d28805 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.039805] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb342047-e181-4578-b607-c3208fbce0ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.051123] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2120.051370] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2120.051559] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2120.052152] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f014d08-2852-4aab-aaa0-d88ab53dbf7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.061236] env[62405]: DEBUG oslo_vmware.api [None 
req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2120.061236] env[62405]: value = "task-1948297" [ 2120.061236] env[62405]: _type = "Task" [ 2120.061236] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.072486] env[62405]: DEBUG nova.compute.manager [req-d130f527-e86b-4712-9fa1-d8b6a4ec4be3 req-ee65eeb5-d61c-4b20-88b5-05ccd5678984 service nova] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Detach interface failed, port_id=744277fe-5ae4-47a1-8b6e-f92b066ed2a3, reason: Instance a91a6d04-2ec0-4568-bdb3-732d148644de could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2120.077584] env[62405]: DEBUG oslo_vmware.api [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.098672] env[62405]: DEBUG nova.objects.instance [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'flavor' on Instance uuid 7c74cae9-1607-4928-a927-f0c8b86f7698 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2120.119447] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948295, 'name': ReconfigVM_Task, 'duration_secs': 0.274162} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.119824] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec/60ccb9f6-29ba-44eb-8cec-0d9b78c235ec.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2120.120509] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4112eb69-5958-48d5-8541-d9939ef37695 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.126795] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2120.126795] env[62405]: value = "task-1948298" [ 2120.126795] env[62405]: _type = "Task" [ 2120.126795] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.134736] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948298, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.463351] env[62405]: DEBUG oslo_concurrency.lockutils [None req-858e48b4-be22-4ff2-855f-504fc4a76bd8 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.511775] env[62405]: INFO nova.compute.manager [-] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Took 1.29 seconds to deallocate network for instance. [ 2120.572207] env[62405]: DEBUG oslo_vmware.api [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150768} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.572491] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2120.572706] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2120.572877] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2120.573061] env[62405]: INFO nova.compute.manager [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2120.573300] env[62405]: DEBUG oslo.service.loopingcall [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2120.573483] env[62405]: DEBUG nova.compute.manager [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2120.573578] env[62405]: DEBUG nova.network.neutron [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2120.636791] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948298, 'name': Rename_Task, 'duration_secs': 0.138695} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.636990] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2120.637260] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2652126a-7b7e-48e1-8d6e-f27552e08f3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.642844] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2120.642844] env[62405]: value = "task-1948299" [ 2120.642844] env[62405]: _type = "Task" [ 2120.642844] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.650150] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948299, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.018256] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.018715] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.019109] env[62405]: DEBUG nova.objects.instance [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lazy-loading 'resources' on Instance uuid a91a6d04-2ec0-4568-bdb3-732d148644de {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2121.105787] env[62405]: DEBUG oslo_concurrency.lockutils [None req-62665d04-498a-451a-958a-641c646b7c33 tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.120076] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2121.120076] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401604', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'name': 'volume-1f963a86-6332-49b1-ac32-d80995d45115', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'serial': '1f963a86-6332-49b1-ac32-d80995d45115'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2121.121190] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4fc2ba-ce37-4919-88ee-9dcc0470e3a5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.137170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f99f1-2382-44d7-8cf5-a6ed919187e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.164922] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-1f963a86-6332-49b1-ac32-d80995d45115/volume-1f963a86-6332-49b1-ac32-d80995d45115.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2121.167984] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-824234be-48d4-4453-b777-dec863cc7aea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.186761] env[62405]: DEBUG oslo_vmware.api [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948299, 'name': PowerOnVM_Task, 'duration_secs': 0.446305} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.187983] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2121.188209] env[62405]: DEBUG nova.compute.manager [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2121.188533] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2121.188533] env[62405]: value = "task-1948300" [ 2121.188533] env[62405]: _type = "Task" [ 2121.188533] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.189323] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246a30c7-d054-4b13-97b5-cb053a73acb3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.199776] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948300, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.285233] env[62405]: DEBUG nova.network.neutron [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.552997] env[62405]: DEBUG nova.compute.manager [req-a034baa3-fe77-4719-9510-daf572f5bfef req-66dd9d75-33f9-4521-88b4-04cdb39a0b8d service nova] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Received event network-vif-deleted-6abc3e61-4638-4911-b589-f37ab143d2b1 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2121.648620] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbd0f0c-579a-4dc6-b4d6-b467673b0260 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.656521] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a44b5d-c4e0-48db-91bb-32a3346306ec {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.687804] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74e9a22-4b20-48e9-aabb-7db35e55fb61 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.700568] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb4ad8b-6585-472a-86bd-961e2482c89a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.711817] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948300, 'name': ReconfigVM_Task, 'duration_secs': 0.423167} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.713994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.715240] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-1f963a86-6332-49b1-ac32-d80995d45115/volume-1f963a86-6332-49b1-ac32-d80995d45115.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.728598] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb143e06-577c-4ec6-9210-35b44a9b3edc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.738554] env[62405]: DEBUG nova.compute.provider_tree [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2121.746598] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2121.746598] env[62405]: value = "task-1948301" [ 2121.746598] env[62405]: _type = "Task" [ 2121.746598] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.755886] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.787950] env[62405]: INFO nova.compute.manager [-] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Took 1.21 seconds to deallocate network for instance. 
[ 2122.193565] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.193847] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.194083] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.194278] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.194455] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.196705] env[62405]: INFO nova.compute.manager [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Terminating instance [ 2122.257842] env[62405]: DEBUG oslo_vmware.api [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948301, 'name': ReconfigVM_Task, 'duration_secs': 0.136771} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.258136] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401604', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'name': 'volume-1f963a86-6332-49b1-ac32-d80995d45115', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'serial': '1f963a86-6332-49b1-ac32-d80995d45115'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2122.272264] env[62405]: DEBUG nova.scheduler.client.report [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 183 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2122.272544] env[62405]: DEBUG nova.compute.provider_tree [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 183 to 184 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2122.272745] env[62405]: DEBUG nova.compute.provider_tree [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2122.294769] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.700167] env[62405]: DEBUG nova.compute.manager [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 
tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2122.700568] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2122.701338] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c767f3-ecdd-485b-b73f-278397c54dcd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.708877] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2122.709107] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eed6adfd-b2f3-436e-881c-078404dd3de2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.715404] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ [ 2122.715404] env[62405]: value = "task-1948302" [ 2122.715404] env[62405]: _type = "Task" [ 2122.715404] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.722689] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.777596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.780058] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.066s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.780542] env[62405]: DEBUG nova.objects.instance [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2122.798667] env[62405]: INFO nova.scheduler.client.report [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Deleted allocations for instance a91a6d04-2ec0-4568-bdb3-732d148644de [ 2123.228370] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948302, 'name': PowerOffVM_Task, 'duration_secs': 0.171299} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.228679] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2123.228939] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2123.229292] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e8700b9-e3a4-4029-81c5-5dd9ec9679d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.291471] env[62405]: DEBUG nova.objects.instance [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.308775] env[62405]: DEBUG oslo_concurrency.lockutils [None req-fa65059a-c1a6-46ae-8e1b-892d49562ab7 tempest-ServerRescueNegativeTestJSON-803816264 tempest-ServerRescueNegativeTestJSON-803816264-project-member] Lock "a91a6d04-2ec0-4568-bdb3-732d148644de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.837s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.421696] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2123.421957] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2123.422210] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleting the datastore file [datastore1] 7c74cae9-1607-4928-a927-f0c8b86f7698 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2123.422512] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c4405da-0bed-445d-8954-c604efaadec3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.429497] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for the task: (returnval){ 
[ 2123.429497] env[62405]: value = "task-1948304" [ 2123.429497] env[62405]: _type = "Task" [ 2123.429497] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.437540] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.790824] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6ad4c3d4-82e8-4e57-9545-5a9160fd44fc tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.791920] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.497s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.792172] env[62405]: DEBUG nova.objects.instance [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'resources' on Instance uuid 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.798065] env[62405]: DEBUG oslo_concurrency.lockutils [None req-bfeda307-a0ce-4f1c-9353-7045fb626b94 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.272s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.940300] env[62405]: DEBUG oslo_vmware.api [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Task: {'id': task-1948304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122264} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.940618] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2123.941283] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2123.941283] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2123.941283] env[62405]: INFO nova.compute.manager [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Took 1.24 seconds to destroy the instance on the hypervisor. [ 2123.941499] env[62405]: DEBUG oslo.service.loopingcall [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2123.941647] env[62405]: DEBUG nova.compute.manager [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2123.941750] env[62405]: DEBUG nova.network.neutron [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2124.041264] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.041532] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.405030] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0938712c-2e3c-47ae-996e-34e2fa3eff6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.412410] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f41f1fe-2aaf-478e-adde-6f83382e26ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.446008] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58a8c14-a5de-4084-9d79-364457462fbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.462360] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9274ece0-2a24-43f5-bc11-dcb62246a3de {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.480588] env[62405]: DEBUG nova.compute.provider_tree [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2124.545026] env[62405]: INFO nova.compute.manager [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Detaching volume cd1772e2-3baa-4a6d-ad99-752be5e16145 [ 2124.582025] env[62405]: INFO nova.virt.block_device [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attempting to driver detach volume cd1772e2-3baa-4a6d-ad99-752be5e16145 from mountpoint /dev/sdb [ 
2124.582421] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2124.582722] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401600', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'name': 'volume-cd1772e2-3baa-4a6d-ad99-752be5e16145', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'serial': 'cd1772e2-3baa-4a6d-ad99-752be5e16145'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2124.583746] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542915a1-a063-4b5f-8263-39004d7e8321 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.608376] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944ed2bc-4582-403d-aac9-5440de9d9566 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.616324] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc1a07b-91b4-4b23-a412-3102448a8d55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.641792] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3e8f3c-4caf-49de-bca4-871f6e7dcf4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.658030] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] The volume has not been displaced from its original location: [datastore1] volume-cd1772e2-3baa-4a6d-ad99-752be5e16145/volume-cd1772e2-3baa-4a6d-ad99-752be5e16145.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2124.662762] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2124.663744] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1b5e284-dd69-439f-83df-c28699741cc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.683628] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2124.683628] env[62405]: value = "task-1948305" [ 2124.683628] env[62405]: _type = "Task" [ 2124.683628] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.694456] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948305, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.886165] env[62405]: DEBUG nova.compute.manager [req-045a7d9d-cfde-40ee-955f-7b2508c3eb19 req-365a83bc-be90-451f-a217-14f02c477623 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Received event network-vif-deleted-afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2124.886669] env[62405]: INFO nova.compute.manager [req-045a7d9d-cfde-40ee-955f-7b2508c3eb19 req-365a83bc-be90-451f-a217-14f02c477623 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Neutron deleted interface afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993; detaching it from the instance and deleting it from the info cache [ 2124.887291] env[62405]: DEBUG nova.network.neutron [req-045a7d9d-cfde-40ee-955f-7b2508c3eb19 req-365a83bc-be90-451f-a217-14f02c477623 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.983260] env[62405]: DEBUG nova.scheduler.client.report [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2125.194334] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 
tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948305, 'name': ReconfigVM_Task, 'duration_secs': 0.339085} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.194613] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2125.199642] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2456b25-72d8-40c0-a315-9f0550587e0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.216497] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2125.216497] env[62405]: value = "task-1948306" [ 2125.216497] env[62405]: _type = "Task" [ 2125.216497] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.226092] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.355921] env[62405]: DEBUG nova.network.neutron [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.391357] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-799a1de3-6db9-496c-83a3-19d6f3e587be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.400987] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef73f91-21ee-41d7-946b-69bae581389c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.431110] env[62405]: DEBUG nova.compute.manager [req-045a7d9d-cfde-40ee-955f-7b2508c3eb19 req-365a83bc-be90-451f-a217-14f02c477623 service nova] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Detach interface failed, port_id=afd2e7f4-e21f-433e-ab9a-fb2e5d1e3993, reason: Instance 7c74cae9-1607-4928-a927-f0c8b86f7698 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2125.487964] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.513422] env[62405]: INFO nova.scheduler.client.report [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocations for instance 9c30bac3-d4f0-4779-9f6e-bc83bb84b001 [ 2125.726819] env[62405]: DEBUG oslo_vmware.api [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948306, 'name': ReconfigVM_Task, 'duration_secs': 0.13234} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.727155] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401600', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'name': 'volume-cd1772e2-3baa-4a6d-ad99-752be5e16145', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1772e2-3baa-4a6d-ad99-752be5e16145', 'serial': 'cd1772e2-3baa-4a6d-ad99-752be5e16145'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2125.858347] env[62405]: INFO nova.compute.manager [-] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Took 1.92 seconds to deallocate network for instance. 
[ 2126.021241] env[62405]: DEBUG oslo_concurrency.lockutils [None req-da2e94e4-94b9-455f-99bb-6b72b3f2db55 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "9c30bac3-d4f0-4779-9f6e-bc83bb84b001" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.574s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.271062] env[62405]: DEBUG nova.objects.instance [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2126.367666] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.367995] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.368254] env[62405]: DEBUG nova.objects.instance [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lazy-loading 'resources' on Instance uuid 7c74cae9-1607-4928-a927-f0c8b86f7698 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2126.965847] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf74d0c-6c42-4152-9153-8e82cebc8d39 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.973487] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bb00be-7ff7-4f7f-9dac-8b0c00b8a9cc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.003745] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3e7713-fd13-4471-9d91-a09bd86225a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.011240] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d323bd2-5419-4df1-8fd6-ddd42311ffc8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.024122] env[62405]: DEBUG nova.compute.provider_tree [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.281378] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-1e2f28ee-9411-4722-b0a7-0efb5d64e623 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.240s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.292596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.292870] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.526880] env[62405]: DEBUG nova.scheduler.client.report [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2127.797065] env[62405]: INFO nova.compute.manager [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Detaching volume 1f963a86-6332-49b1-ac32-d80995d45115 [ 2127.830465] env[62405]: INFO nova.virt.block_device [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Attempting to driver detach volume 1f963a86-6332-49b1-ac32-d80995d45115 from mountpoint /dev/sdc [ 2127.830721] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2127.830910] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401604', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'name': 'volume-1f963a86-6332-49b1-ac32-d80995d45115', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'serial': '1f963a86-6332-49b1-ac32-d80995d45115'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2127.831810] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6464b10-455b-4629-bc27-41eefc897c27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.853189] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656f3b44-8e56-423b-99fd-66ee8adc8b0a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.859478] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ab1f27-54a3-4440-8c30-7fd76f32380e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.878592] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c709f3b2-5eab-4328-98f8-c68a48cf9588 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.895794] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] The volume has not been displaced from its original location: [datastore1] volume-1f963a86-6332-49b1-ac32-d80995d45115/volume-1f963a86-6332-49b1-ac32-d80995d45115.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2127.901132] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfiguring VM instance instance-00000071 to detach disk 2002 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2127.901460] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdc6eb86-4040-4d5c-ba35-017622da1df9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.922024] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2127.922024] env[62405]: value = "task-1948307" [ 2127.922024] env[62405]: _type = "Task" [ 2127.922024] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.930576] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948307, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.031812] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.053306] env[62405]: INFO nova.scheduler.client.report [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Deleted allocations for instance 7c74cae9-1607-4928-a927-f0c8b86f7698 [ 2128.431661] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948307, 'name': ReconfigVM_Task, 'duration_secs': 0.228118} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.431937] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Reconfigured VM instance instance-00000071 to detach disk 2002 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2128.436405] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-233c88ec-b087-49b0-9986-7bffff297066 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.451460] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2128.451460] env[62405]: value = "task-1948308" [ 2128.451460] env[62405]: _type = "Task" [ 2128.451460] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.461009] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948308, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.562069] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d56c2df9-9ca8-4576-b3fe-4c4fddbc134e tempest-AttachVolumeNegativeTest-1699536865 tempest-AttachVolumeNegativeTest-1699536865-project-member] Lock "7c74cae9-1607-4928-a927-f0c8b86f7698" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.368s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.567499] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.567736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.961991] env[62405]: DEBUG oslo_vmware.api [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948308, 'name': ReconfigVM_Task, 'duration_secs': 0.220676} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.962278] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401604', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'name': 'volume-1f963a86-6332-49b1-ac32-d80995d45115', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f1e9a2e7-0fd3-4a89-8c33-bab6d1987230', 'attached_at': '', 'detached_at': '', 'volume_id': '1f963a86-6332-49b1-ac32-d80995d45115', 'serial': '1f963a86-6332-49b1-ac32-d80995d45115'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2129.070563] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2129.378342] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.378612] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.378806] env[62405]: INFO nova.compute.manager [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Shelving [ 2129.505956] env[62405]: DEBUG nova.objects.instance [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'flavor' on Instance uuid f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2129.591859] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.592127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.594012] env[62405]: INFO nova.compute.claims [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2130.390577] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2130.390996] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-580ddc04-496d-4c9e-8ecb-e6b0a31019dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.398889] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2130.398889] env[62405]: value = "task-1948310" [ 2130.398889] env[62405]: _type = "Task" [ 2130.398889] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.407705] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948310, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.512596] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3eafc84b-ebe4-4233-8a5e-eac436f31ba8 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.220s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.691677] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e5023a-4cac-4074-aaaf-6979eb42dff4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.699300] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dd5b5d-531c-4033-a4de-e9baf6d13155 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.728170] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f689b3a-eac3-4fa0-844e-9e738861ad1f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.735371] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5480bae4-1a1b-4949-a990-05b3d2072cf9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.748069] env[62405]: DEBUG nova.compute.provider_tree [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.910388] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948310, 'name': PowerOffVM_Task, 'duration_secs': 0.163608} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.910709] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2130.911511] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4906fb-73e9-402d-a09e-bd26bbecaad7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.929011] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb4b476-c44f-4123-8ce6-c2e862c90313 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.251707] env[62405]: DEBUG nova.scheduler.client.report [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2131.438971] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2131.439344] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-90cd2085-c10f-4918-9edd-1f4e818fe3d4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.448965] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2131.448965] env[62405]: value = "task-1948311" [ 2131.448965] env[62405]: _type = "Task" [ 2131.448965] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2131.458636] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948311, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.673212] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.673483] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.673700] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.673886] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.674074] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.676179] env[62405]: INFO nova.compute.manager [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Terminating instance [ 2131.756876] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.165s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.757466] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Start building networks asynchronously for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2131.959120] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948311, 'name': CreateSnapshot_Task, 'duration_secs': 0.420749} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2131.959405] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2131.960166] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b53bc0-75e1-446b-922b-ea71ae9a6ddd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.180212] env[62405]: DEBUG nova.compute.manager [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2132.180506] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2132.181416] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb630d6-9a58-4892-9d45-d38d4fc123e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.192997] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2132.193264] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19cc8818-3ef1-4f70-bc8c-4321b47ad539 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.201026] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2132.201026] env[62405]: value = "task-1948313" [ 2132.201026] env[62405]: _type = "Task" [ 2132.201026] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.208819] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948313, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.262719] env[62405]: DEBUG nova.compute.utils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2132.264517] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2132.264683] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2132.310054] env[62405]: DEBUG nova.policy [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4ac1534df994c18bad62ec85acbc69f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a423f493034065bb1591d14d215ed8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2132.483417] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2132.483752] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-44810e16-d542-49fd-8b04-998eb43d76c7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.493858] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2132.493858] env[62405]: value = "task-1948314" [ 2132.493858] env[62405]: _type = "Task" [ 2132.493858] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.502604] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948314, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.672389] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Successfully created port: 9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2132.712312] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948313, 'name': PowerOffVM_Task, 'duration_secs': 0.257197} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.712765] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2132.713021] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2132.713324] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c043940-f909-413a-b2f9-ec0ad60657a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.767314] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2132.922196] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2132.922268] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2132.922461] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleting the datastore file [datastore1] f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2132.922868] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea0dcd0e-e3ac-42c8-b01c-1e3f9f059773 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.935736] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for the task: (returnval){ [ 2132.935736] env[62405]: value = "task-1948316" [ 2132.935736] env[62405]: _type = "Task" [ 2132.935736] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.945541] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948316, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.004940] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948314, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.446109] env[62405]: DEBUG oslo_vmware.api [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Task: {'id': task-1948316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163696} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.446273] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2133.446457] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2133.446677] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2133.446833] env[62405]: INFO nova.compute.manager [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2133.447100] env[62405]: DEBUG oslo.service.loopingcall [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2133.447296] env[62405]: DEBUG nova.compute.manager [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2133.447393] env[62405]: DEBUG nova.network.neutron [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2133.505831] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948314, 'name': CloneVM_Task} progress is 95%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.782020] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2133.807494] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2133.807796] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2133.807919] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2133.808114] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2133.808265] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2133.808442] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2133.808670] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2133.808831] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2133.809230] env[62405]: DEBUG 
nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2133.809477] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2133.809724] env[62405]: DEBUG nova.virt.hardware [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2133.810686] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83935d1b-1d0b-451d-b562-e94a88154217 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.819582] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bdb515-0014-4717-8a6c-c23aa2e2b869 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.915413] env[62405]: DEBUG nova.compute.manager [req-57f98a35-7db8-42a3-aeb6-d4a7bf991d9c req-63f1b691-b6bb-45f0-bb9b-b8e0b1426941 service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Received event network-vif-deleted-3d9e960f-b38a-4714-93c0-7ff8857554fe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2133.915714] env[62405]: INFO nova.compute.manager [req-57f98a35-7db8-42a3-aeb6-d4a7bf991d9c req-63f1b691-b6bb-45f0-bb9b-b8e0b1426941 service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Neutron deleted interface 3d9e960f-b38a-4714-93c0-7ff8857554fe; detaching it from the instance and deleting it from the info cache [ 2133.915957] env[62405]: DEBUG nova.network.neutron [req-57f98a35-7db8-42a3-aeb6-d4a7bf991d9c req-63f1b691-b6bb-45f0-bb9b-b8e0b1426941 service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.005563] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948314, 'name': CloneVM_Task, 'duration_secs': 1.29977} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.005842] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Created linked-clone VM from snapshot [ 2134.006561] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca53151f-d6e8-40b8-9510-eac52dba6942 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.014224] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Uploading image 532d41d8-81a7-4af2-a07d-498462c3c81d {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2134.040182] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2134.040182] env[62405]: value = "vm-401606" [ 2134.040182] env[62405]: _type = "VirtualMachine" [ 2134.040182] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2134.040535] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-55c3240a-59d8-406b-9455-611eb5f967a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.048406] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease: (returnval){ [ 2134.048406] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527e71c0-72d3-4845-8665-c0d92ff02797" [ 2134.048406] env[62405]: _type = "HttpNfcLease" [ 2134.048406] env[62405]: } obtained for exporting VM: (result){ [ 2134.048406] env[62405]: value = "vm-401606" [ 2134.048406] env[62405]: _type = "VirtualMachine" [ 2134.048406] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2134.048762] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the lease: (returnval){ [ 2134.048762] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527e71c0-72d3-4845-8665-c0d92ff02797" [ 2134.048762] env[62405]: _type = "HttpNfcLease" [ 2134.048762] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2134.058011] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2134.058011] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527e71c0-72d3-4845-8665-c0d92ff02797" [ 2134.058011] env[62405]: _type = "HttpNfcLease" [ 2134.058011] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2134.090635] env[62405]: DEBUG nova.compute.manager [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Received event network-vif-plugged-9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2134.090926] env[62405]: DEBUG oslo_concurrency.lockutils [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.091152] env[62405]: DEBUG oslo_concurrency.lockutils [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.091326] env[62405]: DEBUG oslo_concurrency.lockutils [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.091569] env[62405]: DEBUG nova.compute.manager [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] No waiting events found dispatching network-vif-plugged-9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2134.091764] env[62405]: WARNING nova.compute.manager [req-b43611d4-3f2f-4645-95b5-53d7fd33ccbf req-70ad0392-72b3-485a-9626-4ed497bed27f service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Received unexpected event network-vif-plugged-9dc5f509-f8ba-495b-8931-0591e98d462c for instance with vm_state building and task_state spawning. 
[ 2134.175186] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Successfully updated port: 9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2134.393249] env[62405]: DEBUG nova.network.neutron [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.421452] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e5a513b-4875-4b52-8c67-7566d1ddcd4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.429052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8dba11-3f43-48ee-bd43-7f52a537755b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.457331] env[62405]: DEBUG nova.compute.manager [req-57f98a35-7db8-42a3-aeb6-d4a7bf991d9c req-63f1b691-b6bb-45f0-bb9b-b8e0b1426941 service nova] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Detach interface failed, port_id=3d9e960f-b38a-4714-93c0-7ff8857554fe, reason: Instance f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2134.556217] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2134.556217] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527e71c0-72d3-4845-8665-c0d92ff02797" [ 2134.556217] env[62405]: _type = "HttpNfcLease" [ 2134.556217] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2134.556640] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2134.556640] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527e71c0-72d3-4845-8665-c0d92ff02797" [ 2134.556640] env[62405]: _type = "HttpNfcLease" [ 2134.556640] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2134.557173] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c26de94-6ec4-4cc5-bcde-2317607a2550 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.564374] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk from lease info. 
{{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2134.564547] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2134.650787] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a606f294-0a1a-45e9-893b-52b102921e30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.679810] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.679957] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.680124] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2134.897208] env[62405]: INFO nova.compute.manager [-] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Took 1.45 seconds to deallocate network for instance. [ 2135.222604] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2135.369089] env[62405]: DEBUG nova.network.neutron [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating instance_info_cache with network_info: [{"id": "9dc5f509-f8ba-495b-8931-0591e98d462c", "address": "fa:16:3e:21:aa:b0", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc5f509-f8", "ovs_interfaceid": "9dc5f509-f8ba-495b-8931-0591e98d462c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.404606] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.404960] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.405254] env[62405]: DEBUG nova.objects.instance [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lazy-loading 'resources' on Instance uuid f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2135.872998] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.873355] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance network_info: |[{"id": "9dc5f509-f8ba-495b-8931-0591e98d462c", 
"address": "fa:16:3e:21:aa:b0", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc5f509-f8", "ovs_interfaceid": "9dc5f509-f8ba-495b-8931-0591e98d462c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2135.873819] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:aa:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9dc5f509-f8ba-495b-8931-0591e98d462c', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2135.882043] env[62405]: DEBUG oslo.service.loopingcall [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2135.882273] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2135.882718] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41e085d9-0ae1-4923-8d25-f37c7ebc0ffa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.903725] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2135.903725] env[62405]: value = "task-1948319" [ 2135.903725] env[62405]: _type = "Task" [ 2135.903725] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.914710] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.015659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00decd9-4a6c-48df-a95c-7e6a8d198b1d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.023550] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed04002f-7c18-42d6-bca1-65b3217ee6c6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.054353] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69b88ed-a830-42d0-ab1a-300e463f924b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.062098] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a65236-264e-4b04-8b2e-5c9f9a6a4a28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.075570] env[62405]: DEBUG nova.compute.provider_tree [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.125273] env[62405]: DEBUG nova.compute.manager [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Received event network-changed-9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2136.125598] env[62405]: DEBUG nova.compute.manager [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Refreshing instance network info cache due to event network-changed-9dc5f509-f8ba-495b-8931-0591e98d462c. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2136.125957] env[62405]: DEBUG oslo_concurrency.lockutils [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] Acquiring lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.126174] env[62405]: DEBUG oslo_concurrency.lockutils [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] Acquired lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.126391] env[62405]: DEBUG nova.network.neutron [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Refreshing network info cache for port 9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2136.415065] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948319, 'name': CreateVM_Task, 'duration_secs': 0.484362} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.415261] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2136.415946] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.416129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.416455] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2136.416712] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0f74887-18e6-4cff-b7c3-73e74ff60da9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.422010] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2136.422010] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525cfa2a-5122-5e2e-87a5-968d6f3c6b7b" [ 2136.422010] env[62405]: _type = "Task" [ 2136.422010] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.430043] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525cfa2a-5122-5e2e-87a5-968d6f3c6b7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.579705] env[62405]: DEBUG nova.scheduler.client.report [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2136.917753] env[62405]: DEBUG nova.network.neutron [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updated VIF entry in instance network info cache for port 9dc5f509-f8ba-495b-8931-0591e98d462c. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2136.918144] env[62405]: DEBUG nova.network.neutron [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating instance_info_cache with network_info: [{"id": "9dc5f509-f8ba-495b-8931-0591e98d462c", "address": "fa:16:3e:21:aa:b0", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc5f509-f8", "ovs_interfaceid": "9dc5f509-f8ba-495b-8931-0591e98d462c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2136.933018] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]525cfa2a-5122-5e2e-87a5-968d6f3c6b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.012668} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.934027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.934027] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2136.934027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.934027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.934259] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2136.934368] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-310138cd-c1e0-4819-9667-ceee5663f4c3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.943189] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2136.943385] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2136.944085] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a5f8c59-b424-4f39-b835-4b34b1da2ff1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.949570] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2136.949570] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adcdc8-9a01-9474-467c-b5a4c01743ab" [ 2136.949570] env[62405]: _type = "Task" [ 2136.949570] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.957373] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adcdc8-9a01-9474-467c-b5a4c01743ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.085339] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.106204] env[62405]: INFO nova.scheduler.client.report [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Deleted allocations for instance f1e9a2e7-0fd3-4a89-8c33-bab6d1987230 [ 2137.420714] env[62405]: DEBUG oslo_concurrency.lockutils [req-931ba78e-26ff-43ea-af3e-9b63ab05cd33 req-9a3e9b18-1798-4052-baf2-d78994018416 service nova] Releasing lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.459763] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52adcdc8-9a01-9474-467c-b5a4c01743ab, 'name': SearchDatastore_Task, 'duration_secs': 0.013987} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.460562] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0aa811-c86a-449a-aea6-11f5e40dc631 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.465515] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2137.465515] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a28941-997c-349b-a81e-05e4409351fc" [ 2137.465515] env[62405]: _type = "Task" [ 2137.465515] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.472650] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a28941-997c-349b-a81e-05e4409351fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.614219] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9875190a-2b60-452f-adcc-3e87f92a52d9 tempest-AttachVolumeTestJSON-2095518101 tempest-AttachVolumeTestJSON-2095518101-project-member] Lock "f1e9a2e7-0fd3-4a89-8c33-bab6d1987230" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.941s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2137.976907] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a28941-997c-349b-a81e-05e4409351fc, 'name': SearchDatastore_Task, 'duration_secs': 0.028028} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.976907] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.976907] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2137.976907] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3646b2a9-eff5-4e04-8056-8d6b39167036 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.983964] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2137.983964] env[62405]: value = "task-1948320" [ 2137.983964] env[62405]: _type = "Task" [ 2137.983964] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.992251] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948320, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.493333] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948320, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.994690] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948320, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517126} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.995018] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2138.995160] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2138.995412] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6644dd42-c372-4468-9f6a-e5a0cbf9af72 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.003608] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2139.003608] env[62405]: value = "task-1948322" [ 2139.003608] env[62405]: _type = "Task" [ 2139.003608] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.011729] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948322, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.515623] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087466} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.515950] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2139.516691] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9e6d7b-a847-4511-9616-1650f01a8c0d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.540621] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.540947] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a936d0e3-4636-4d76-aae0-6da4676d8161 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.561383] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2139.561383] env[62405]: value = "task-1948323" [ 2139.561383] env[62405]: _type = "Task" [ 2139.561383] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.571016] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948323, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.074185] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.573384] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.073880] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948323, 'name': ReconfigVM_Task, 'duration_secs': 1.099323} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.076061] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to attach disk [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2141.076061] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a2a96ff-bbd0-471b-888c-3e77ae245f56 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.081471] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2141.081471] env[62405]: value = "task-1948324" [ 2141.081471] env[62405]: _type = "Task" [ 2141.081471] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.089494] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948324, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.591441] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948324, 'name': Rename_Task, 'duration_secs': 0.181173} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.591719] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2141.591980] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7235095-6d5d-455b-8848-a96cbb0df974 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.598761] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2141.598761] env[62405]: value = "task-1948326" [ 2141.598761] env[62405]: _type = "Task" [ 2141.598761] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.607928] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948326, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.109598] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948326, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.609203] env[62405]: DEBUG oslo_vmware.api [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948326, 'name': PowerOnVM_Task, 'duration_secs': 0.566911} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.609477] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2142.609683] env[62405]: INFO nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Took 8.83 seconds to spawn the instance on the hypervisor. [ 2142.609867] env[62405]: DEBUG nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2142.610657] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7620dc1b-3052-4c85-8694-95c67fd27ce1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.127803] env[62405]: INFO nova.compute.manager [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Took 13.55 seconds to build instance. [ 2143.264386] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2143.265348] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2914bea3-a8c8-4f7b-8c33-c226da04f0bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.272433] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk is in state: ready. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2143.272433] env[62405]: ERROR oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk due to incomplete transfer. [ 2143.272433] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-36d72aa3-ceae-41e7-a03d-c63dbb4d1b6c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.279061] env[62405]: DEBUG oslo_vmware.rw_handles [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524ba6ce-a503-66b1-1d4d-84ea57208aa8/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2143.279222] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Uploaded image 532d41d8-81a7-4af2-a07d-498462c3c81d to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2143.281551] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2143.281766] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1e9cb9a1-4d89-46ce-803c-3909dc79d944 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.287387] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2143.287387] env[62405]: value = "task-1948327" [ 2143.287387] env[62405]: _type = "Task" [ 2143.287387] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.294812] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948327, 'name': Destroy_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.630210] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3ae72715-4a0d-4e16-839f-b2a9bac1177d tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.062s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.797262] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948327, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.034394] env[62405]: DEBUG nova.compute.manager [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Received event network-changed-9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2144.034587] env[62405]: DEBUG nova.compute.manager [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Refreshing instance network info cache due to event network-changed-9dc5f509-f8ba-495b-8931-0591e98d462c. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2144.034821] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] Acquiring lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.034966] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] Acquired lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.035143] env[62405]: DEBUG nova.network.neutron [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Refreshing network info cache for port 9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2144.298620] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948327, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.795534] env[62405]: DEBUG nova.network.neutron [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updated VIF entry in instance network info cache for port 9dc5f509-f8ba-495b-8931-0591e98d462c. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2144.795927] env[62405]: DEBUG nova.network.neutron [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating instance_info_cache with network_info: [{"id": "9dc5f509-f8ba-495b-8931-0591e98d462c", "address": "fa:16:3e:21:aa:b0", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dc5f509-f8", "ovs_interfaceid": "9dc5f509-f8ba-495b-8931-0591e98d462c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.800629] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948327, 'name': Destroy_Task, 'duration_secs': 1.047578} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.801134] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Destroyed the VM [ 2144.801417] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2144.801700] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-555e9469-2919-44b2-8047-d0a9874b4988 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.808713] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2144.808713] env[62405]: value = "task-1948329" [ 2144.808713] env[62405]: _type = "Task" [ 2144.808713] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.817116] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.302730] env[62405]: DEBUG oslo_concurrency.lockutils [req-a6562887-7964-418e-a618-0361d893e018 req-e0a89c0d-7eef-4135-8f57-9010de6d5cee service nova] Releasing lock "refresh_cache-f4af587c-08d3-457e-a20d-a5ea8aad311f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.318207] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.819882] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.063749] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.063980] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.064195] env[62405]: INFO nova.compute.manager [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Shelving [ 2146.320052] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.820802] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.075905] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2147.076244] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09887ef3-50b3-40f8-a8fa-3c99bb3f76e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.083502] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2147.083502] env[62405]: value = "task-1948330" [ 2147.083502] env[62405]: _type = "Task" [ 2147.083502] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.090941] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948330, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.322938] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948329, 'name': RemoveSnapshot_Task, 'duration_secs': 2.340495} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.323251] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2147.323524] env[62405]: DEBUG nova.compute.manager [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2147.324431] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867fa47f-746a-4b44-bf8a-97382e527306 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.592827] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948330, 'name': PowerOffVM_Task, 'duration_secs': 0.155508} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.593037] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2147.593799] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8fd156-6625-4dd8-a824-93de6965f7aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.611247] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da16ccf-c31b-40a3-8e38-6135ddb6e76b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.836647] env[62405]: INFO nova.compute.manager [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Shelve offloading [ 2148.121892] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Creating Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2148.122288] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8f0eab5a-ef30-455e-9b79-d188781d862d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.130181] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2148.130181] env[62405]: value = "task-1948331" [ 2148.130181] env[62405]: _type = "Task" [ 2148.130181] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.138803] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948331, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.342887] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2148.343284] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fe5d1e2-0ba5-42b7-9535-c6b6bb7a0a24 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.351320] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2148.351320] env[62405]: value = "task-1948332" [ 2148.351320] env[62405]: _type = "Task" [ 2148.351320] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.359945] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948332, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.640358] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948331, 'name': CreateSnapshot_Task, 'duration_secs': 0.423672} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.640679] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Created Snapshot of the VM instance {{(pid=62405) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2148.641428] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7834a71a-2112-494a-bdbd-73f39c6a89d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.862867] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2148.863147] env[62405]: DEBUG nova.compute.manager [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2148.863910] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb33d67-aba7-4407-ac90-5688c1b0c37b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.869872] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.870057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.870237] env[62405]: DEBUG nova.network.neutron [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2149.165309] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Creating linked-clone VM from snapshot {{(pid=62405) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2149.165806] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4c253615-51d0-4504-998d-465f194f1c7f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.177901] 
env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2149.177901] env[62405]: value = "task-1948333" [ 2149.177901] env[62405]: _type = "Task" [ 2149.177901] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.187807] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948333, 'name': CloneVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.603239] env[62405]: DEBUG nova.network.neutron [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.689761] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948333, 'name': CloneVM_Task} progress is 94%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.106530] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2150.188062] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948333, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.341985] env[62405]: DEBUG nova.compute.manager [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-vif-unplugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2150.342467] env[62405]: DEBUG oslo_concurrency.lockutils [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.342641] env[62405]: DEBUG oslo_concurrency.lockutils [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.342840] env[62405]: DEBUG oslo_concurrency.lockutils [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.343108] env[62405]: DEBUG nova.compute.manager [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] No waiting events found dispatching network-vif-unplugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2150.343309] env[62405]: WARNING nova.compute.manager [req-8fe56475-66fe-4bb3-b55b-bda92318d008 req-74887a01-ce4f-42e7-96a9-adabc386fb23 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received unexpected event network-vif-unplugged-277de975-3957-41da-9e47-47a0be7e666f for instance with vm_state shelved and task_state shelving_offloading. 
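
Editor's note: every vCenter operation in the records above and below follows the same shape: a SOAP "*_Task" method is invoked through oslo.vmware, and wait_for_task then polls the task object (the repeated "_poll_task ... progress is N%" lines) until it completes or fails. The following is a minimal, illustrative sketch of driving one such task from Python, not Nova's own code; the vCenter host, credentials, datastore paths, and the exact VMwareAPISession constructor arguments are placeholders / assumptions and should be checked against the installed oslo.vmware release.

    # Hedged sketch of the invoke-task-then-poll pattern seen in this log.
    # Host, credentials, and disk paths below are placeholders only.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',            # placeholder vCenter endpoint
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)           # seconds between poll cycles

    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager

    # Kick off an asynchronous CopyVirtualDisk_Task, analogous to the
    # image-cache -> instance VMDK copy in the spawn flow above.
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] cache/base.vmdk',        # placeholder
        destName='[datastore1] instance/instance.vmdk')   # placeholder

    # Blocks while polling the task state, logging progress much like the
    # "_poll_task ... progress is N%" records here, and raises on failure.
    session.wait_for_task(task)
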
[ 2150.508998] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2150.509906] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccabb25c-bf91-4be4-934c-4b1b078e06dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.517497] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2150.517728] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aea0fc05-4fd2-4445-a129-cd9640d51135 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.616610] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2150.617016] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2150.617016] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleting the datastore file [datastore1] fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2150.617228] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b86dc745-c5d7-43b0-9b6c-dd5b054ef983 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.623629] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2150.623629] env[62405]: value = "task-1948335" [ 2150.623629] env[62405]: _type = "Task" [ 2150.623629] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.631223] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948335, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.688365] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948333, 'name': CloneVM_Task, 'duration_secs': 1.188435} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2150.688658] env[62405]: INFO nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Created linked-clone VM from snapshot [ 2150.689424] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f38d5d-5f20-4bb7-aa14-9f2230b3b559 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.696729] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Uploading image a1697b8d-76e7-4c48-8b43-4dec5af3b5df {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2150.723271] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2150.723271] env[62405]: value = "vm-401609" [ 2150.723271] env[62405]: _type = "VirtualMachine" [ 2150.723271] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2150.723559] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f81bef66-b2b9-4424-b54a-3b5832e45cd6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.731560] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease: (returnval){ [ 2150.731560] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b846c6-c529-3181-f5e8-304d428b31e6" [ 2150.731560] env[62405]: _type = "HttpNfcLease" [ 2150.731560] env[62405]: } obtained for exporting VM: (result){ [ 2150.731560] env[62405]: value = "vm-401609" [ 2150.731560] env[62405]: _type = "VirtualMachine" [ 2150.731560] env[62405]: }. {{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2150.731863] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the lease: (returnval){ [ 2150.731863] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b846c6-c529-3181-f5e8-304d428b31e6" [ 2150.731863] env[62405]: _type = "HttpNfcLease" [ 2150.731863] env[62405]: } to be ready. 
{{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2150.738340] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2150.738340] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b846c6-c529-3181-f5e8-304d428b31e6" [ 2150.738340] env[62405]: _type = "HttpNfcLease" [ 2150.738340] env[62405]: } is initializing. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2151.133703] env[62405]: DEBUG oslo_vmware.api [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15319} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.133965] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2151.134178] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2151.134371] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2151.156952] env[62405]: INFO nova.scheduler.client.report [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted allocations for instance fd311606-a314-4030-9d51-929993ab6b14 [ 2151.239667] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2151.239667] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b846c6-c529-3181-f5e8-304d428b31e6" [ 2151.239667] env[62405]: _type = "HttpNfcLease" [ 2151.239667] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2151.239949] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2151.239949] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52b846c6-c529-3181-f5e8-304d428b31e6" [ 2151.239949] env[62405]: _type = "HttpNfcLease" [ 2151.239949] env[62405]: }. 
{{(pid=62405) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2151.240593] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11efdea3-93ce-4c03-894e-c332cf486612 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.247549] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2151.247725] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk for reading. {{(pid=62405) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2151.333950] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cf935ed2-6ce9-47d9-a173-1a65e52c499c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.661952] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2151.662349] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2151.662699] env[62405]: DEBUG nova.objects.instance [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'resources' on Instance uuid fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2151.834239] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2151.834497] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.165042] env[62405]: DEBUG nova.objects.instance [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'numa_topology' on Instance uuid fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2152.340194] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.340360] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 2152.371199] env[62405]: DEBUG nova.compute.manager [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-changed-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2152.371199] env[62405]: DEBUG nova.compute.manager [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing instance network info cache due to event network-changed-277de975-3957-41da-9e47-47a0be7e666f. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2152.371324] env[62405]: DEBUG oslo_concurrency.lockutils [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2152.371564] env[62405]: DEBUG oslo_concurrency.lockutils [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2152.371883] env[62405]: DEBUG nova.network.neutron [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing network info cache for port 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2152.668750] env[62405]: DEBUG nova.objects.base [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2152.751019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c445a7-13a5-4ff7-a22e-ad790535643d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.758999] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01734144-7e09-4735-97a7-77681ea5f7a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.789646] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633808e7-9d17-4225-b3a6-9bbb2f6d38f3 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.797600] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a994b075-828a-412b-b315-3589d2a200dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.812474] env[62405]: DEBUG nova.compute.provider_tree [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2153.018538] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.146863] env[62405]: DEBUG nova.network.neutron [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updated VIF entry in instance network info cache for port 277de975-3957-41da-9e47-47a0be7e666f. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2153.147291] env[62405]: DEBUG nova.network.neutron [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap277de975-39", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2153.315437] env[62405]: DEBUG nova.scheduler.client.report [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2153.650814] env[62405]: DEBUG oslo_concurrency.lockutils [req-8e252c1e-dc4f-401a-870c-6105c3cfe88f req-a2926421-d5af-4690-8054-eff9f173098f service nova] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2153.821294] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.330916] env[62405]: DEBUG oslo_concurrency.lockutils [None req-64bbb6d5-3719-4309-a598-8a972bf729d3 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.952s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.331875] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.313s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.332306] env[62405]: INFO nova.compute.manager [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Unshelving [ 2155.359103] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.359385] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.359608] env[62405]: DEBUG nova.objects.instance [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'pci_requests' on Instance uuid fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2155.862954] env[62405]: DEBUG nova.objects.instance [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'numa_topology' on Instance uuid 
fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2156.365226] env[62405]: INFO nova.compute.claims [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2156.862488] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 2156.862692] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.862826] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.862967] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.863149] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.863298] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.863447] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2156.863602] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 2156.863753] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2157.367117] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2157.464425] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329e1911-63d2-4132-adcb-3b5795abf1b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.471427] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a0052e-583f-4700-8a59-55a9197af0e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.501529] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ac2822-b3a2-4d13-87c5-e7e49e92a75f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.509426] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5b35a7-bc82-49eb-a1e5-b9a67c1001b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.522904] env[62405]: DEBUG nova.compute.provider_tree [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.026320] env[62405]: DEBUG nova.scheduler.client.report [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2158.133174] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.133446] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.133660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.133848] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.134027] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.136238] env[62405]: INFO nova.compute.manager [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Terminating instance [ 2158.533038] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.173s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.535364] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.168s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.535546] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2158.535709] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2158.536722] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2d111dcf-c6b0-4715-be89-1b9e8b9f086f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.545325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d920b2-17cc-420d-92f2-7d1061eb80dc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.559460] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15bd1c7-e477-42f3-804d-5320e7de8ced {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.566298] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7103a4c-abd6-4aec-a021-a3f06f171b51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.570321] env[62405]: INFO nova.network.neutron [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating port 277de975-3957-41da-9e47-47a0be7e666f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2158.598141] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179317MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2158.598404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2158.598540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2158.639452] env[62405]: DEBUG nova.compute.manager [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2158.639693] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2158.640574] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0708ad-d522-4f76-8624-19d4f81553ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.648403] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2158.648639] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d952ddcf-a594-4ffc-ab5c-e8337b5a35e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.654422] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2158.654422] env[62405]: value = "task-1948337" [ 2158.654422] env[62405]: _type = "Task" [ 2158.654422] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.662016] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948337, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.037752] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2159.038718] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0b5a52-03e2-4c77-836d-b9451b61ed20 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.044611] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk is in state: ready. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2159.044780] env[62405]: ERROR oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk due to incomplete transfer. [ 2159.044988] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e79e71a-ee5f-4f09-a38c-6e7385b2d08d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.052294] env[62405]: DEBUG oslo_vmware.rw_handles [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523b7b7d-c8a9-43df-38ef-011dd188378d/disk-0.vmdk. {{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2159.052489] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Uploaded image a1697b8d-76e7-4c48-8b43-4dec5af3b5df to the Glance image server {{(pid=62405) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2159.054737] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Destroying the VM {{(pid=62405) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2159.055042] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-abe42e0b-2ef3-4938-8faa-3106751ce86e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.060742] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2159.060742] env[62405]: value = "task-1948338" [ 2159.060742] env[62405]: _type = "Task" [ 2159.060742] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.067838] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948338, 'name': Destroy_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.164824] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948337, 'name': PowerOffVM_Task, 'duration_secs': 0.211035} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.165103] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2159.165278] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2159.165522] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-091adc1c-8938-4b0d-a143-b4e190bf70f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.571151] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948338, 'name': Destroy_Task} progress is 33%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.611987] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2159.611987] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2159.611987] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleting the datastore file [datastore1] 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2159.612566] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbb94f1b-4ca7-454c-ac4c-b66b0320ba29 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.620053] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2159.620053] env[62405]: value = "task-1948340" [ 2159.620053] env[62405]: _type = "Task" [ 2159.620053] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.628540] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.629638] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.629874] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46b794f6-e858-45e6-9977-98ab246482f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.630099] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.630291] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8185f9bc-48d5-4cb7-a48d-f744ff704868 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.630502] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f4af587c-08d3-457e-a20d-a5ea8aad311f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.630679] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance fd311606-a314-4030-9d51-929993ab6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2159.630934] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2159.631174] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2159.710264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a83929-4bc0-46ea-9c80-1609d60c66d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.717605] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a274f010-b8ab-4ec5-98a6-6c5ca3dfd221 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.747313] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32651c84-0a04-42a0-a7b8-c3e212810b40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.754789] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a9f883-66c1-433b-92cb-c1b00f0e8438 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.767592] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2160.073234] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948338, 'name': Destroy_Task, 'duration_secs': 0.667681} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.073519] env[62405]: INFO nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Destroyed the VM [ 2160.073802] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleting Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2160.074095] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dae7add6-bc5c-40ec-a38a-3e74c6eb4a26 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.080985] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2160.080985] env[62405]: value = "task-1948341" [ 2160.080985] env[62405]: _type = "Task" [ 2160.080985] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.088729] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948341, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.090856] env[62405]: DEBUG nova.compute.manager [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2160.091081] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.091290] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.091456] env[62405]: DEBUG oslo_concurrency.lockutils [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.091625] env[62405]: DEBUG nova.compute.manager [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] No waiting events found dispatching network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2160.091792] env[62405]: WARNING nova.compute.manager [req-b1aac336-07c1-4151-9d3b-f50e1506a197 req-26786b32-fe95-48e1-8eda-d4c9a2b36cdb service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received unexpected event network-vif-plugged-277de975-3957-41da-9e47-47a0be7e666f for instance with vm_state shelved_offloaded and task_state spawning. [ 2160.129018] env[62405]: DEBUG oslo_vmware.api [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206113} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.129286] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2160.129473] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2160.129654] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2160.129826] env[62405]: INFO nova.compute.manager [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Took 1.49 seconds to destroy the instance on the hypervisor. [ 2160.130074] env[62405]: DEBUG oslo.service.loopingcall [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2160.130260] env[62405]: DEBUG nova.compute.manager [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2160.130350] env[62405]: DEBUG nova.network.neutron [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2160.186127] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2160.186417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2160.186630] env[62405]: DEBUG nova.network.neutron [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2160.271337] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2160.577035] env[62405]: DEBUG nova.compute.manager [req-ff461adf-0b27-495d-97c4-e4e7b5f70c3e req-b1d80a0b-69f8-4cb4-a128-7d90ccec052a service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Received event network-vif-deleted-c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2160.577319] env[62405]: INFO nova.compute.manager [req-ff461adf-0b27-495d-97c4-e4e7b5f70c3e req-b1d80a0b-69f8-4cb4-a128-7d90ccec052a service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Neutron deleted interface c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe; detaching it from the instance and deleting it from the info cache [ 2160.577440] env[62405]: DEBUG nova.network.neutron [req-ff461adf-0b27-495d-97c4-e4e7b5f70c3e req-b1d80a0b-69f8-4cb4-a128-7d90ccec052a service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2160.591769] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 
tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948341, 'name': RemoveSnapshot_Task, 'duration_secs': 0.354679} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.592034] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleted Snapshot of the VM instance {{(pid=62405) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2160.592311] env[62405]: DEBUG nova.compute.manager [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2160.593057] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67fc9f98-9ca5-4832-ba63-01d2c664e3d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.776645] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2160.776853] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.178s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.903897] env[62405]: DEBUG nova.network.neutron [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2161.054490] env[62405]: DEBUG nova.network.neutron [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2161.079933] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7facebf4-9d1d-440a-9c46-4dd54f30f2a8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.089699] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf92bdef-8586-4f99-9855-848487ef7aa3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.104639] env[62405]: INFO nova.compute.manager [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Shelve offloading [ 2161.116846] env[62405]: DEBUG nova.compute.manager [req-ff461adf-0b27-495d-97c4-e4e7b5f70c3e req-b1d80a0b-69f8-4cb4-a128-7d90ccec052a service nova] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Detach interface failed, port_id=c50bcbe3-9e1f-4a25-a53f-4753d9b04dfe, reason: Instance 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2161.406520] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.433762] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='071d5fd86746999a66168ffd782f8db3',container_format='bare',created_at=2024-12-21T03:32:09Z,direct_url=,disk_format='vmdk',id=532d41d8-81a7-4af2-a07d-498462c3c81d,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1957151903-shelved',owner='f3b50cc219314108945bfc8b2c21849a',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-12-21T03:32:24Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2161.434033] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2161.434196] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image limits 
0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2161.434377] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2161.434525] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2161.434673] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2161.434883] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2161.435056] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2161.435227] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2161.435391] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2161.435561] env[62405]: DEBUG nova.virt.hardware [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2161.436466] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f499576-58a9-4163-8152-9bb674798446 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.444435] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba2b747-504e-4e39-9885-dd9895b15ce0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.457674] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 
tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:f0:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277de975-3957-41da-9e47-47a0be7e666f', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2161.464965] env[62405]: DEBUG oslo.service.loopingcall [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2161.465217] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2161.465420] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c152306-a388-46f4-927d-e8c05fc13850 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.483646] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2161.483646] env[62405]: value = "task-1948342" [ 2161.483646] env[62405]: _type = "Task" [ 2161.483646] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.490947] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948342, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.556623] env[62405]: INFO nova.compute.manager [-] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Took 1.43 seconds to deallocate network for instance. [ 2161.608312] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2161.608649] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56547c72-011f-4653-9681-685a3ca13659 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.616029] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2161.616029] env[62405]: value = "task-1948343" [ 2161.616029] env[62405]: _type = "Task" [ 2161.616029] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.625367] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2161.625584] env[62405]: DEBUG nova.compute.manager [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2161.626312] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276d3466-647a-4077-ac62-4b688d4cdd1c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.631941] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.632137] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.632311] env[62405]: DEBUG nova.network.neutron [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2161.994852] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948342, 'name': CreateVM_Task, 'duration_secs': 0.313948} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.995261] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2161.996725] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2161.996725] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.996725] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2161.996904] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdcf2707-04ff-4aaf-ad61-a18c9d520021 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.001928] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2162.001928] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230c3da-f4f7-3013-359f-a2d15f9fc22e" [ 2162.001928] env[62405]: _type = "Task" [ 2162.001928] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.009401] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5230c3da-f4f7-3013-359f-a2d15f9fc22e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.062655] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.062916] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.063181] env[62405]: DEBUG nova.objects.instance [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'resources' on Instance uuid 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2162.112184] env[62405]: DEBUG nova.compute.manager [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-changed-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2162.112378] env[62405]: DEBUG nova.compute.manager [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing instance network info cache due to event network-changed-277de975-3957-41da-9e47-47a0be7e666f. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2162.112587] env[62405]: DEBUG oslo_concurrency.lockutils [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] Acquiring lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.112731] env[62405]: DEBUG oslo_concurrency.lockutils [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] Acquired lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.112892] env[62405]: DEBUG nova.network.neutron [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Refreshing network info cache for port 277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2162.349539] env[62405]: DEBUG nova.network.neutron [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.512911] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.513201] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Processing image 532d41d8-81a7-4af2-a07d-498462c3c81d {{(pid=62405) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2162.513437] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2162.513582] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquired lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2162.513815] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2162.514013] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-710227b9-d154-4af5-8c79-e50f50a5ebb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.522418] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2162.522588] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2162.523300] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67bc8846-00e5-49ab-912c-1ea6ef2e07b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.527888] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2162.527888] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c683a-edbb-e034-7a70-d95fc8a4c7a2" [ 2162.527888] env[62405]: _type = "Task" [ 2162.527888] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.535065] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]529c683a-edbb-e034-7a70-d95fc8a4c7a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2162.655555] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b825cd-4ce2-47e5-86e0-2b1747038608 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.664395] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a01e7e-f5fa-4eb1-a8f8-330d25b054e7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.712801] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afd63b6-e2de-405e-9fd5-74f242aaeeb5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.722663] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06dbb4b-0e2a-4a71-8b1a-51eae7df4d3b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.742239] env[62405]: DEBUG nova.compute.provider_tree [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.852811] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2162.875212] env[62405]: DEBUG nova.network.neutron [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updated VIF entry in instance network info cache for port 277de975-3957-41da-9e47-47a0be7e666f. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2162.875592] env[62405]: DEBUG nova.network.neutron [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [{"id": "277de975-3957-41da-9e47-47a0be7e666f", "address": "fa:16:3e:7d:f0:f1", "network": {"id": "3ca0a5a2-a18f-47fa-9d7b-0e27aa414878", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1312490542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3b50cc219314108945bfc8b2c21849a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277de975-39", "ovs_interfaceid": "277de975-3957-41da-9e47-47a0be7e666f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.038205] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2163.038467] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Fetch image to [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b/OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2163.038655] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Downloading stream optimized image 532d41d8-81a7-4af2-a07d-498462c3c81d to [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b/OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b.vmdk on the data store datastore1 as vApp {{(pid=62405) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2163.038859] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Downloading image file data 532d41d8-81a7-4af2-a07d-498462c3c81d to the ESX as VM named 'OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b' {{(pid=62405) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2163.110376] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2163.110376] env[62405]: value = "resgroup-9" [ 2163.110376] env[62405]: _type = "ResourcePool" [ 2163.110376] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2163.110674] env[62405]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-09598006-fd21-4ed1-8995-e05a23f4052f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.135513] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease: (returnval){ [ 2163.135513] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2e7b5-9db4-54ad-d401-6d6696bf5867" [ 2163.135513] env[62405]: _type = "HttpNfcLease" [ 2163.135513] env[62405]: } obtained for vApp import into resource pool (val){ [ 2163.135513] env[62405]: value = "resgroup-9" [ 2163.135513] env[62405]: _type = "ResourcePool" [ 2163.135513] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2163.135873] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the lease: (returnval){ [ 2163.135873] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2e7b5-9db4-54ad-d401-6d6696bf5867" [ 2163.135873] env[62405]: _type = "HttpNfcLease" [ 2163.135873] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2163.142394] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2163.142394] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2e7b5-9db4-54ad-d401-6d6696bf5867" [ 2163.142394] env[62405]: _type = "HttpNfcLease" [ 2163.142394] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2163.220475] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2163.221718] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9786cce5-7e5d-4f9b-b9f3-86ea84fcab71 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.231647] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2163.232035] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f737b8a-ea43-4612-917e-ca141df11ab9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.245297] env[62405]: DEBUG nova.scheduler.client.report [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2163.378539] env[62405]: DEBUG oslo_concurrency.lockutils [req-815a6cb6-06d1-4101-895f-5e9c77c96d96 req-32f0c95e-a32a-4afb-aaf2-f7f21067fe08 service nova] Releasing lock "refresh_cache-fd311606-a314-4030-9d51-929993ab6b14" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.644417] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2163.644417] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2e7b5-9db4-54ad-d401-6d6696bf5867" [ 2163.644417] env[62405]: _type = "HttpNfcLease" [ 2163.644417] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2163.644795] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2163.644795] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52a2e7b5-9db4-54ad-d401-6d6696bf5867" [ 2163.644795] env[62405]: _type = "HttpNfcLease" [ 2163.644795] env[62405]: }. 
{{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2163.645350] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ad22f1-7110-4251-8866-f3a97426f294 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.652334] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk from lease info. {{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2163.652513] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk. {{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2163.713845] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-86c49697-81dd-4f35-bdad-9ec4658cbbfd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.750468] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.769672] env[62405]: INFO nova.scheduler.client.report [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted allocations for instance 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec [ 2164.051782] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2164.052062] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2164.052224] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2164.052493] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-9e9ad762-8f93-4c4f-ba90-3a0a0511a398 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.060083] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2164.060083] env[62405]: value = "task-1948346" [ 2164.060083] env[62405]: _type = "Task" [ 2164.060083] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.069940] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.148440] env[62405]: DEBUG nova.compute.manager [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-vif-unplugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2164.148679] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.148925] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.149123] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.149305] env[62405]: DEBUG nova.compute.manager [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] No waiting events found dispatching network-vif-unplugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2164.149464] env[62405]: WARNING nova.compute.manager [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received unexpected event network-vif-unplugged-9ccf45be-5a2c-4a79-862c-d1b26508863f for instance with vm_state shelved and task_state shelving_offloading. 
[ 2164.149819] env[62405]: DEBUG nova.compute.manager [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2164.149819] env[62405]: DEBUG nova.compute.manager [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing instance network info cache due to event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2164.149965] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.150106] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.150268] env[62405]: DEBUG nova.network.neutron [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2164.279528] env[62405]: DEBUG oslo_concurrency.lockutils [None req-18d3f79c-cf07-412c-87e6-6c2659d77a5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "60ccb9f6-29ba-44eb-8cec-0d9b78c235ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.146s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.570563] env[62405]: DEBUG oslo_vmware.api [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14274} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.572700] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2164.572700] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2164.572700] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2164.595023] env[62405]: INFO nova.scheduler.client.report [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted allocations for instance 8185f9bc-48d5-4cb7-a48d-f744ff704868 [ 2164.790823] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2164.791161] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2164.792104] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792bf343-7d34-40a6-88f3-5f44997e2e69 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.798614] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2164.798801] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk. 
{{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2164.799044] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-223f5b8f-c7d5-4225-a67e-174b39a3fbd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.981213] env[62405]: DEBUG nova.network.neutron [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updated VIF entry in instance network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2164.981600] env[62405]: DEBUG nova.network.neutron [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.015212] env[62405]: DEBUG oslo_vmware.rw_handles [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cf61de-dd65-a311-9b7c-0ca03b08c1d1/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2165.015212] env[62405]: INFO nova.virt.vmwareapi.images [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Downloaded image file data 532d41d8-81a7-4af2-a07d-498462c3c81d [ 2165.015905] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e084ffe8-66d2-450a-8de3-68a5c093ffee {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.033200] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de9643b5-ccc2-4193-9ee2-169a1442693e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.078037] env[62405]: INFO nova.virt.vmwareapi.images [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] The imported VM was unregistered [ 2165.080452] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2165.080696] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Creating directory with path [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2165.081046] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00db08e7-5c3b-4aee-adaa-03d0567ddf91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.091837] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Created directory with path [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2165.092060] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b/OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b.vmdk to [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk. 
{{(pid=62405) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2165.092311] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f02a82c2-9cbc-4aac-b530-cf7d18c27bc6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.098427] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2165.098427] env[62405]: value = "task-1948348" [ 2165.098427] env[62405]: _type = "Task" [ 2165.098427] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.102602] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.102851] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.103078] env[62405]: DEBUG nova.objects.instance [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'resources' on Instance uuid 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2165.108713] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.484596] env[62405]: DEBUG oslo_concurrency.lockutils [req-97fd5e40-945b-43e3-9ebd-e0147889a262 req-e19f4914-b972-4312-9554-57cf5343e0d5 service nova] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2165.605377] env[62405]: DEBUG nova.objects.instance [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'numa_topology' on Instance uuid 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2165.609899] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.112077] env[62405]: DEBUG nova.objects.base [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Object Instance<8185f9bc-48d5-4cb7-a48d-f744ff704868> lazy-loaded attributes: resources,numa_topology {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2166.113997] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.190519] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d1bad7-e965-4693-b39d-7bef41724f9f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.200910] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f622b05-ae2d-4c02-84c7-d588bf57a584 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.235393] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40096323-c615-43d9-963e-802d53f44151 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.245278] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096a3025-872a-4fb2-85b5-180b5dffbe5c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.263314] env[62405]: DEBUG nova.compute.provider_tree [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2166.291161] env[62405]: DEBUG nova.compute.manager [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2166.612337] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.767654] env[62405]: DEBUG nova.scheduler.client.report [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2166.808876] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.084063] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.112435] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.272777] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.275644] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.467s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.610167] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948348, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.282893} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.610450] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b/OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b.vmdk to [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk. [ 2167.610639] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Cleaning up location [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2167.610863] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c995a926-2316-46dd-89db-5f47ae681d3b {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2167.611099] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-334ee2d2-50ff-494f-b20f-5e3abc06a662 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.617488] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2167.617488] env[62405]: value = "task-1948349" [ 2167.617488] env[62405]: _type = "Task" [ 2167.617488] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.624710] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948349, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.782458] env[62405]: INFO nova.compute.claims [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2167.786967] env[62405]: DEBUG oslo_concurrency.lockutils [None req-3abd4047-1756-4c8c-8a5d-3022455f2dd1 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.723s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.787747] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.704s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.787928] env[62405]: INFO nova.compute.manager [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Unshelving [ 2168.128795] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034861} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2168.129194] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2168.129239] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Releasing lock "[datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.129446] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk to [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2168.129705] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd9b7472-c5f4-4335-8999-d31d710b08f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.137098] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2168.137098] env[62405]: value = "task-1948350" [ 2168.137098] env[62405]: _type = "Task" [ 2168.137098] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2168.144729] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 0%. 
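Annotation: the three steps above (move the staged OSTACK_IMG_* disk into devstack-image-cache_base, delete the staging directory, then copy the cached VMDK into the instance folder) are the driver's fetch-if-missing flow for the image cache. The sketch below is a local-filesystem analogy of that flow, runnable as-is; the fetch callable stands in for the Glance download, and the paths only mimic the datastore layout shown in the log.

    import shutil
    import tempfile
    from pathlib import Path

    def fetch_image_if_missing(cache_root: Path, image_id: str, fetch) -> Path:
        # Populate the cache on a miss, publishing via a staging directory so a
        # half-downloaded image never appears under the cached name.
        cached = cache_root / image_id / ('%s.vmdk' % image_id)
        if not cached.exists():
            staging = Path(tempfile.mkdtemp(prefix='OSTACK_IMG_'))
            tmp = staging / ('%s.vmdk' % image_id)
            fetch(tmp)                                # download into the staging dir
            cached.parent.mkdir(parents=True, exist_ok=True)
            shutil.move(str(tmp), str(cached))        # "Moved virtual disk ... to ... cache"
            shutil.rmtree(staging)                    # "Cleaning up location OSTACK_IMG_..."
        return cached  # caller then copies this into the instance directory

    cache = Path(tempfile.mkdtemp())
    disk = fetch_image_if_missing(cache, '532d41d8', lambda p: p.write_bytes(b'vmdk'))
    print(disk)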
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.289551] env[62405]: INFO nova.compute.resource_tracker [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating resource usage from migration 2b817578-8342-441e-a433-69c477465cdd [ 2168.387020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12962399-8c63-4780-8ca5-afbacdc450cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.395659] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75514565-dc9d-47b8-8747-c870aedfb56d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.435169] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148b1e97-58af-4a67-9b71-0b7d1a9bcfb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.445548] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed2b6f5-e607-41ae-9362-01bc86190689 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.462551] env[62405]: DEBUG nova.compute.provider_tree [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.646960] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2168.814150] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.965775] env[62405]: DEBUG nova.scheduler.client.report [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2169.151492] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.470810] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.195s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.471207] env[62405]: INFO nova.compute.manager [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Migrating [ 2169.478050] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.664s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.478302] env[62405]: DEBUG nova.objects.instance [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'pci_requests' on Instance uuid 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.489337] env[62405]: DEBUG nova.objects.instance [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'numa_topology' on Instance uuid 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2169.652368] env[62405]: DEBUG oslo_vmware.api 
[None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.988675] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.988898] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.989151] env[62405]: DEBUG nova.network.neutron [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2169.992299] env[62405]: INFO nova.compute.claims [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2170.151137] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.652781] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 74%. 
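Annotation: the inventory dict reported to Placement above fixes the host's schedulable capacity: usable = (total - reserved) * allocation_ratio, with max_unit capping what any single instance may request. Plugging in the values from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 171},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %.0f schedulable, at most %s per instance' % (rc, usable, inv['max_unit']))
    # VCPU: 192 schedulable, MEMORY_MB: 196078 schedulable, DISK_GB: 400 schedulable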
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.829265] env[62405]: DEBUG nova.network.neutron [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.094028] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b679c9c2-31dd-42fd-ad39-9c984cdba347 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.104156] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b444af47-a35d-4e48-bc52-0aa8950872f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.138465] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4a9bbc-0e9d-4565-8150-588d9f0cdfb8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.149373] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task} progress is 97%. 
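Annotation: the instance_info_cache entry above is a list of VIF dicts, each carrying its network, subnets, fixed IPs and any attached floating IPs. A short traversal of that structure, trimmed to the fields it uses, with values copied from the log:

    network_info = [{
        'id': 'a7c7d269-027f-42d9-819a-e04ab445d816',
        'address': 'fa:16:3e:c7:9c:e0',
        'network': {'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.12', 'type': 'fixed',
                     'floating_ips': [{'address': '10.180.180.241', 'type': 'floating'}]}],
        }]},
    }]
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floats = [f['address'] for f in ip.get('floating_ips', [])]
                print(vif['id'], ip['address'], '->', floats)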
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.152589] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3453a4-8754-4675-96d8-fd1c0301a68d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.168271] env[62405]: DEBUG nova.compute.provider_tree [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2171.331765] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.651996] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948350, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.090125} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2171.651996] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/532d41d8-81a7-4af2-a07d-498462c3c81d/532d41d8-81a7-4af2-a07d-498462c3c81d.vmdk to [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2171.652316] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b8846c-6ba5-4838-aedc-351453e6e611 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.673629] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2171.674554] env[62405]: DEBUG nova.scheduler.client.report [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2171.677446] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2db9d62e-179f-4fb3-8148-a1849f8005d7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.697258] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2171.697258] env[62405]: value = "task-1948351" [ 2171.697258] env[62405]: _type = "Task" [ 2171.697258] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.704986] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948351, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.192679] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.207015] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948351, 'name': ReconfigVM_Task, 'duration_secs': 0.325778} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.207298] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Reconfigured VM instance instance-00000075 to attach disk [datastore1] fd311606-a314-4030-9d51-929993ab6b14/fd311606-a314-4030-9d51-929993ab6b14.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2172.207908] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f38be060-8e81-4f90-bb2f-420f46d05f79 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.214055] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2172.214055] env[62405]: value = "task-1948352" [ 2172.214055] env[62405]: _type = "Task" [ 2172.214055] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.223544] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948352, 'name': Rename_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.224283] env[62405]: INFO nova.network.neutron [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating port 9ccf45be-5a2c-4a79-862c-d1b26508863f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2172.723982] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948352, 'name': Rename_Task, 'duration_secs': 0.191728} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.724272] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2172.724524] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68b9b877-dfb9-4c77-9c95-ae27a1be01ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.731117] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2172.731117] env[62405]: value = "task-1948353" [ 2172.731117] env[62405]: _type = "Task" [ 2172.731117] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.738299] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948353, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.847567] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1115d088-8ada-48eb-97f9-3f00ada08894 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.866570] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2173.241381] env[62405]: DEBUG oslo_vmware.api [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948353, 'name': PowerOnVM_Task, 'duration_secs': 0.481558} completed successfully. 
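Annotation: before the unshelved instance can be wired up on this host, its Neutron port is rebound, which is the "Updating port ... with attributes {'binding:host_id': 'cpu-1', ...}" entry above. Nova performs this through its internal Neutron client; purely as an illustration, the same update could be expressed with openstacksdk, assuming a configured 'devstack' cloud and openstacksdk's binding_host_id / device_owner attribute names.

    import openstack

    conn = openstack.connect(cloud='devstack')         # cloud name is an assumption
    conn.network.update_port(
        '9ccf45be-5a2c-4a79-862c-d1b26508863f',        # port UUID from the log entry above
        binding_host_id='cpu-1',                       # maps to the 'binding:host_id' attribute
        device_owner='compute:nova',
    )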
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.241695] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2173.341908] env[62405]: DEBUG nova.compute.manager [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2173.342891] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c60a00b-6357-42e9-88a3-9e20c76401e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.372205] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2173.372520] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c45ad22a-2d88-4cbe-bf7c-d67217695dc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.379598] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2173.379598] env[62405]: value = "task-1948354" [ 2173.379598] env[62405]: _type = "Task" [ 2173.379598] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.387359] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948354, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.616738] env[62405]: DEBUG nova.compute.manager [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2173.616959] env[62405]: DEBUG oslo_concurrency.lockutils [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2173.617440] env[62405]: DEBUG oslo_concurrency.lockutils [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2173.617624] env[62405]: DEBUG oslo_concurrency.lockutils [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.617817] env[62405]: DEBUG nova.compute.manager [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] No waiting events found dispatching network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2173.617997] env[62405]: WARNING nova.compute.manager [req-68e06d9e-4dd5-4837-a2da-4e22fddfacc1 req-a3ca03bd-8080-4dfd-a0c8-086a6ecb015d service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received unexpected event network-vif-plugged-9ccf45be-5a2c-4a79-862c-d1b26508863f for instance with vm_state shelved_offloaded and task_state spawning. 
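Annotation: the WARNING above is benign. Neutron delivered network-vif-plugged for port 9ccf45be before the unshelve path had registered a waiter for it, so pop_instance_event found nothing to dispatch. Below is a stripped-down sketch of that register/dispatch pattern, illustrative only and not Nova's actual InstanceEvents implementation.

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "...-events" lock above
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    events = InstanceEvents()
    # Neutron's event arrives, but nothing has registered a waiter yet:
    ev = events.pop_instance_event('8185f9bc', 'network-vif-plugged-9ccf45be')
    print('dispatching' if ev else 'No waiting events found; log the unexpected-event warning')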
[ 2173.702213] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2173.702455] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2173.702612] env[62405]: DEBUG nova.network.neutron [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2173.860046] env[62405]: DEBUG oslo_concurrency.lockutils [None req-a4280ef3-21ed-4465-aeb3-494ce67be04e tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.528s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2173.889888] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948354, 'name': PowerOffVM_Task, 'duration_secs': 0.234299} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.890160] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2173.890362] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2174.397310] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2174.397573] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2174.397710] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2174.397891] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2174.398059] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2174.398214] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2174.398423] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2174.398582] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2174.398748] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2174.398966] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2174.399184] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2174.404116] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff48f254-cf2b-4f95-babd-d7ab95964868 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.421448] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2174.421448] env[62405]: value = "task-1948355" [ 2174.421448] env[62405]: _type = "Task" [ 2174.421448] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.429027] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948355, 'name': ReconfigVM_Task} progress is 5%. 
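Annotation: the nova.virt.hardware lines above enumerate candidate guest CPU topologies for the 1-vCPU m1.micro flavor: with no flavor or image limits the maxima default to 65536 per dimension, and the only factorisation of one vCPU is sockets=1, cores=1, threads=1. A simplified enumeration that reproduces that result; the real code also orders candidates by preference, which is skipped here.

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # All (sockets, cores, threads) triples whose product equals vcpus,
        # within the per-dimension limits.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    # The 1-vCPU flavors above, with the 65536 default limits:
    print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]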
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.476722] env[62405]: DEBUG nova.network.neutron [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.930829] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948355, 'name': ReconfigVM_Task, 'duration_secs': 0.439624} completed successfully. 
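Annotation: the resize path reports coarse progress as it works: "progress to 0" just before the VM is powered off above, "progress to 17" once the power-off completes, and "progress to 33" just below after the ReconfigVM_Task finishes. Those percentages are consistent with progress = round(step / total_steps * 100) for a six-step resize; the step count here is inferred from the observed values, not quoted from the code.

    TOTAL_STEPS = 6   # inferred from the 0 / 17 / 33 sequence in this log
    for step in range(TOTAL_STEPS + 1):
        print('step %d -> progress %d%%' % (step, round(step / TOTAL_STEPS * 100)))
    # 0 -> 0, 1 -> 17, 2 -> 33, 3 -> 50, 4 -> 67, 5 -> 83, 6 -> 100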
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.931217] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2174.979844] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.003128] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1d18c9f60b10560f38777c8f8fe53225',container_format='bare',created_at=2024-12-21T03:32:26Z,direct_url=,disk_format='vmdk',id=a1697b8d-76e7-4c48-8b43-4dec5af3b5df,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1869439400-shelved',owner='28cfe90f16b140018a5802c02f751d9c',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-12-21T03:32:39Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2175.003382] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2175.003541] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2175.003725] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2175.003876] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2175.004090] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2175.004322] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2175.004487] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2175.004657] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2175.004824] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2175.005012] env[62405]: DEBUG nova.virt.hardware [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2175.005859] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cbfb95-3078-4301-a369-9167ad7d7879 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.014225] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6155cd6-80f7-4317-9b11-89cbe713141e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.027031] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:07:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ccf45be-5a2c-4a79-862c-d1b26508863f', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2175.034247] env[62405]: DEBUG oslo.service.loopingcall [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
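Annotation: the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry above is logged by oslo.service's loopingcall module, which keeps the caller blocked while CreateVM_Task is polled. Below is a small self-contained example of one looping-call primitive from that module; it needs oslo.service installed, and the fake progress sequence stands in for the real task.

    from oslo_service import loopingcall

    progress = iter([0, 99, 'done'])

    def _poll():
        state = next(progress)
        if state == 'done':
            # Raising LoopingCallDone hands a return value back to the waiter.
            raise loopingcall.LoopingCallDone(retvalue='vm-ref')
        print('CreateVM_Task progress is %s%%' % state)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    print(timer.start(interval=0.1).wait())   # -> vm-ref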
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2175.034506] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2175.034728] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae83a4d4-3f92-4aef-818b-e7bd85520016 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.054148] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2175.054148] env[62405]: value = "task-1948356" [ 2175.054148] env[62405]: _type = "Task" [ 2175.054148] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.061102] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948356, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.147363] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.147585] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.147807] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "fd311606-a314-4030-9d51-929993ab6b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.148039] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.148235] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.150454] env[62405]: INFO nova.compute.manager [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Terminating instance [ 2175.437509] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2175.437742] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2175.437993] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2175.438278] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2175.438470] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2175.438658] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2175.438929] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2175.439174] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2175.439371] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2175.439544] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2175.440156] env[62405]: DEBUG nova.virt.hardware [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2175.445008] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfiguring VM instance instance-0000005c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2175.445480] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6248f2c-5732-4ae0-8dc7-7719a28f64f8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.463868] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2175.463868] env[62405]: value = "task-1948357" [ 2175.463868] env[62405]: _type = "Task" [ 2175.463868] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.471174] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.563199] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948356, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.641352] env[62405]: DEBUG nova.compute.manager [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2175.641414] env[62405]: DEBUG nova.compute.manager [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing instance network info cache due to event network-changed-9ccf45be-5a2c-4a79-862c-d1b26508863f. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2175.641651] env[62405]: DEBUG oslo_concurrency.lockutils [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] Acquiring lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.641826] env[62405]: DEBUG oslo_concurrency.lockutils [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] Acquired lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.642051] env[62405]: DEBUG nova.network.neutron [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Refreshing network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2175.656069] env[62405]: DEBUG nova.compute.manager [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2175.656288] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2175.659119] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab60b4d-eff0-4224-a3ed-794cd4b2ed7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.667032] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2175.667282] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf7769f0-86ab-4ff4-889c-d772a5963e46 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.673932] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2175.673932] env[62405]: value = "task-1948358" [ 2175.673932] env[62405]: _type = "Task" [ 2175.673932] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.683049] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948358, 'name': PowerOffVM_Task} progress is 0%. 
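Annotation: the terminate path that starts above runs a fixed sequence: power the VM off, unregister it from vCenter, then delete its datastore directory (the UnregisterVM and DeleteDatastoreFile_Task steps follow on the next lines). A tiny ordering sketch with stand-in callables for the three vSphere operations; the real calls live in vm_util and ds_util.

    def destroy_instance(power_off, unregister, delete_files, vm_ref, ds_path):
        power_off(vm_ref)        # PowerOffVM_Task: stop the guest first
        unregister(vm_ref)       # UnregisterVM: drop it from vCenter's inventory
        delete_files(ds_path)    # DeleteDatastoreFile_Task: reclaim the datastore dir

    destroy_instance(print, print, print,
                     'fd311606-a314-4030-9d51-929993ab6b14',
                     '[datastore1] fd311606-a314-4030-9d51-929993ab6b14')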
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.973880] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948357, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.064012] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948356, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.183409] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948358, 'name': PowerOffVM_Task, 'duration_secs': 0.185271} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.183671] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2176.183844] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2176.184094] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66b563f3-1c53-41dc-8f1b-a503cb38b757 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.348513] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2176.348791] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2176.348995] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleting the datastore file [datastore1] fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2176.349310] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a359456-f091-4eee-93d8-65ba6426198a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.357178] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 
tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for the task: (returnval){ [ 2176.357178] env[62405]: value = "task-1948360" [ 2176.357178] env[62405]: _type = "Task" [ 2176.357178] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.361117] env[62405]: DEBUG nova.network.neutron [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updated VIF entry in instance network info cache for port 9ccf45be-5a2c-4a79-862c-d1b26508863f. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2176.361512] env[62405]: DEBUG nova.network.neutron [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [{"id": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "address": "fa:16:3e:44:07:1c", "network": {"id": "feb681f7-0a8f-4000-89ec-88a027ee7f15", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-478938422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cfe90f16b140018a5802c02f751d9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ccf45be-5a", "ovs_interfaceid": "9ccf45be-5a2c-4a79-862c-d1b26508863f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.367476] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948360, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.475134] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948357, 'name': ReconfigVM_Task, 'duration_secs': 0.990846} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.475434] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfigured VM instance instance-0000005c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2176.476195] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deed33ff-f2f7-474f-8b2f-6fdbe1693c87 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.498354] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2176.498611] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7d915ca-0c5c-46f7-a9b5-9933fff64526 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.517484] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2176.517484] env[62405]: value = "task-1948361" [ 2176.517484] env[62405]: _type = "Task" [ 2176.517484] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.525126] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948361, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.564594] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948356, 'name': CreateVM_Task, 'duration_secs': 1.362763} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.564772] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2176.565505] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.565687] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.566100] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2176.566358] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82ce23d6-ce45-4088-a827-e9c2fe9352e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.570976] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2176.570976] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c4d5ff-d554-4ed4-4e6a-1a4f1fa7f2b5" [ 2176.570976] env[62405]: _type = "Task" [ 2176.570976] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.580432] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c4d5ff-d554-4ed4-4e6a-1a4f1fa7f2b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.866523] env[62405]: DEBUG oslo_concurrency.lockutils [req-d391c9bc-4ec7-43a1-8217-ed1f06f43d96 req-af5593fd-78a1-4a74-aadf-82f598342d65 service nova] Releasing lock "refresh_cache-8185f9bc-48d5-4cb7-a48d-f744ff704868" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.866920] env[62405]: DEBUG oslo_vmware.api [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Task: {'id': task-1948360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129344} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.867169] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2176.867377] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2176.867574] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2176.867750] env[62405]: INFO nova.compute.manager [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] [instance: fd311606-a314-4030-9d51-929993ab6b14] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2176.867983] env[62405]: DEBUG oslo.service.loopingcall [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.868188] env[62405]: DEBUG nova.compute.manager [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2176.868283] env[62405]: DEBUG nova.network.neutron [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2177.028435] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948361, 'name': ReconfigVM_Task, 'duration_secs': 0.244311} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.028750] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Reconfigured VM instance instance-0000005c to attach disk [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82/b495f9e6-60c8-4509-a34f-2e7ed59b6d82.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2177.028961] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2177.081950] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.082257] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Processing image a1697b8d-76e7-4c48-8b43-4dec5af3b5df {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2177.082484] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.082633] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.082812] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2177.083073] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c3ebfda-b144-4dc0-a957-4ff0fb4bee5f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.092693] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 
tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2177.092884] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2177.093611] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d5b00b-0c54-4022-8f2f-7fed7ef8fa2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.099563] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2177.099563] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eac219-f6a8-9ea2-e4bc-02cc03ec06f7" [ 2177.099563] env[62405]: _type = "Task" [ 2177.099563] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.106863] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52eac219-f6a8-9ea2-e4bc-02cc03ec06f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.536020] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cff1d47-2515-4e62-8286-caef088f4430 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.556779] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1ba3d3-287d-4639-9cfe-f2e593869df9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.576482] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2177.612120] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Preparing fetch location {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2177.612120] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Fetch image to [datastore1] 
OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7/OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7.vmdk {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2177.612120] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Downloading stream optimized image a1697b8d-76e7-4c48-8b43-4dec5af3b5df to [datastore1] OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7/OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7.vmdk on the data store datastore1 as vApp {{(pid=62405) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2177.612120] env[62405]: DEBUG nova.virt.vmwareapi.images [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Downloading image file data a1697b8d-76e7-4c48-8b43-4dec5af3b5df to the ESX as VM named 'OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7' {{(pid=62405) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2177.665421] env[62405]: DEBUG nova.compute.manager [req-f604da58-6dad-46d5-b495-15799594468d req-adab694f-d3ce-4a9d-90e5-e4ea7d1b91db service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Received event network-vif-deleted-277de975-3957-41da-9e47-47a0be7e666f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2177.666706] env[62405]: INFO nova.compute.manager [req-f604da58-6dad-46d5-b495-15799594468d req-adab694f-d3ce-4a9d-90e5-e4ea7d1b91db service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Neutron deleted interface 277de975-3957-41da-9e47-47a0be7e666f; detaching it from the instance and deleting it from the info cache [ 2177.666706] env[62405]: DEBUG nova.network.neutron [req-f604da58-6dad-46d5-b495-15799594468d req-adab694f-d3ce-4a9d-90e5-e4ea7d1b91db service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.695466] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2177.695466] env[62405]: value = "resgroup-9" [ 2177.695466] env[62405]: _type = "ResourcePool" [ 2177.695466] env[62405]: }. 
{{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2177.695755] env[62405]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b2503d4f-0a1d-4077-be7b-1f08251d0d32 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.719889] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease: (returnval){ [ 2177.719889] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cc18f-d834-78a3-9eb0-8fd8d31b6745" [ 2177.719889] env[62405]: _type = "HttpNfcLease" [ 2177.719889] env[62405]: } obtained for vApp import into resource pool (val){ [ 2177.719889] env[62405]: value = "resgroup-9" [ 2177.719889] env[62405]: _type = "ResourcePool" [ 2177.719889] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2177.720415] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the lease: (returnval){ [ 2177.720415] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cc18f-d834-78a3-9eb0-8fd8d31b6745" [ 2177.720415] env[62405]: _type = "HttpNfcLease" [ 2177.720415] env[62405]: } to be ready. {{(pid=62405) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2177.725934] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2177.725934] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cc18f-d834-78a3-9eb0-8fd8d31b6745" [ 2177.725934] env[62405]: _type = "HttpNfcLease" [ 2177.725934] env[62405]: } is initializing. 
{{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2178.086059] env[62405]: DEBUG nova.network.neutron [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2178.121127] env[62405]: DEBUG nova.network.neutron [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Port a7c7d269-027f-42d9-819a-e04ab445d816 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2178.168441] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d89a53f-394c-4567-9db7-47efd2daccd4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.178244] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4066e6d2-ed14-477e-b7fb-d288ffd80722 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.204978] env[62405]: DEBUG nova.compute.manager [req-f604da58-6dad-46d5-b495-15799594468d req-adab694f-d3ce-4a9d-90e5-e4ea7d1b91db service nova] [instance: fd311606-a314-4030-9d51-929993ab6b14] Detach interface failed, port_id=277de975-3957-41da-9e47-47a0be7e666f, reason: Instance fd311606-a314-4030-9d51-929993ab6b14 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2178.226919] env[62405]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2178.226919] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cc18f-d834-78a3-9eb0-8fd8d31b6745" [ 2178.226919] env[62405]: _type = "HttpNfcLease" [ 2178.226919] env[62405]: } is ready. {{(pid=62405) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2178.227237] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2178.227237] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528cc18f-d834-78a3-9eb0-8fd8d31b6745" [ 2178.227237] env[62405]: _type = "HttpNfcLease" [ 2178.227237] env[62405]: }. {{(pid=62405) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2178.227910] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2cc94c-4966-41e4-97c6-5cd367b8b665 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.234793] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk from lease info. 
{{(pid=62405) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2178.234969] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk. {{(pid=62405) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2178.298198] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e484bc92-6127-42b5-a1c2-09e6e2126c66 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.588330] env[62405]: INFO nova.compute.manager [-] [instance: fd311606-a314-4030-9d51-929993ab6b14] Took 1.72 seconds to deallocate network for instance. [ 2179.096523] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.096936] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.097167] env[62405]: DEBUG nova.objects.instance [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lazy-loading 'resources' on Instance uuid fd311606-a314-4030-9d51-929993ab6b14 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.146807] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.147084] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.147283] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.501282] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Completed reading data from the image iterator. {{(pid=62405) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2179.501547] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2179.502514] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d615a7-3b12-471e-8bc8-007c31099ce7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.509077] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk is in state: ready. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2179.509251] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk. {{(pid=62405) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2179.509473] env[62405]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9a8cbce7-c62a-479d-b453-32a765ba7f68 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.683128] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea289c34-a1b6-423a-8ebf-7842fb4a0d17 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.690642] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193a7cea-41d6-49ba-80a4-370e679f7ec5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.722354] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1093fbbf-0e11-45b0-85d1-ec01d6aed899 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.726234] env[62405]: DEBUG oslo_vmware.rw_handles [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528d8f7f-eb32-30f5-22ce-cc12e631a7fb/disk-0.vmdk. 
{{(pid=62405) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2179.726434] env[62405]: INFO nova.virt.vmwareapi.images [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Downloaded image file data a1697b8d-76e7-4c48-8b43-4dec5af3b5df [ 2179.727487] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db97c55-7a84-4cb8-a102-1d2a2bfae8e6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.733504] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7ec555-9980-42c9-a967-a4c1a9a186bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.747554] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19b9bc55-0763-4da2-99bb-e9615790b5ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.756696] env[62405]: DEBUG nova.compute.provider_tree [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.782613] env[62405]: INFO nova.virt.vmwareapi.images [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] The imported VM was unregistered [ 2179.784920] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Caching image {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2179.785170] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Creating directory with path [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2179.785430] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04a09111-0759-41ba-ad9c-e415c6e99e4c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.805467] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Created directory with path [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2179.805657] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Moving 
virtual disk from [datastore1] OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7/OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7.vmdk to [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk. {{(pid=62405) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2179.805903] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6af86da4-dd57-4560-99fa-b5acb24da04e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.813114] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2179.813114] env[62405]: value = "task-1948364" [ 2179.813114] env[62405]: _type = "Task" [ 2179.813114] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.820737] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948364, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.212857] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2180.213140] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2180.213326] env[62405]: DEBUG nova.network.neutron [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2180.260098] env[62405]: DEBUG nova.scheduler.client.report [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2180.325939] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: 
{'id': task-1948364, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.764940] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.784235] env[62405]: INFO nova.scheduler.client.report [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Deleted allocations for instance fd311606-a314-4030-9d51-929993ab6b14 [ 2180.825718] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948364, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.063739] env[62405]: DEBUG nova.network.neutron [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2181.160901] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.161204] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] 
Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.294070] env[62405]: DEBUG oslo_concurrency.lockutils [None req-adcd167d-be8a-4252-82c7-8df88b37e8a7 tempest-ServerActionsTestOtherB-674341360 tempest-ServerActionsTestOtherB-674341360-project-member] Lock "fd311606-a314-4030-9d51-929993ab6b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.146s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.328906] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948364, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.567255] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2181.664619] env[62405]: DEBUG nova.compute.utils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2181.826609] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948364, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.092545] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df20082-9ccc-4a88-bb85-c6bdbd1800f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.114083] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54d41bc-94f4-40c4-8757-3b5a23e58876 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.123315] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2182.167582] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.326892] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948364, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.47715} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.327199] env[62405]: INFO nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7/OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7.vmdk to [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk. 
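The entries above (the ImportVApp lease, the MoveVirtualDisk_Task, and the CopyVirtualDisk_Task that follows) trace the VMware driver's image-cache flow for image a1697b8d-76e7-4c48-8b43-4dec5af3b5df: the stream-optimized image is first imported under a temporary OSTACK_IMG_* name, moved once into [datastore1] devstack-image-cache_base/<image_id>/, and each instance then receives its own copy of the cached VMDK. Below is a minimal, self-contained sketch of that move-then-copy pattern in plain Python; a local directory stands in for the datastore, the directory names are taken from the log, and everything else is illustrative rather than Nova's actual implementation.

    import shutil
    import uuid
    from pathlib import Path

    # Illustrative sketch only (not Nova code): a local directory stands in
    # for "[datastore1]"; devstack-image-cache_base and OSTACK_IMG_* are the
    # names visible in the log entries above.
    DATASTORE = Path("/tmp/datastore1")
    IMAGE_CACHE = DATASTORE / "devstack-image-cache_base"

    def fetch_image_if_missing(image_id: str, instance_uuid: str) -> Path:
        """Mimic the move-then-copy caching flow visible in the log."""
        cached_vmdk = IMAGE_CACHE / image_id / f"{image_id}.vmdk"

        if not cached_vmdk.exists():
            # 1. The image is first imported as a throwaway vApp under a
            #    temporary OSTACK_IMG_<uuid> directory (ImportVApp lease,
            #    HTTP write to disk-0.vmdk in the log) ...
            tmp_name = f"OSTACK_IMG_{uuid.uuid4()}"
            tmp_vmdk = DATASTORE / tmp_name / f"{tmp_name}.vmdk"
            tmp_vmdk.parent.mkdir(parents=True, exist_ok=True)
            tmp_vmdk.write_bytes(b"...image bytes would be streamed here...")

            # 2. ... then moved once into the shared image cache
            #    (MoveVirtualDisk_Task) and the temporary location is
            #    cleaned up (DeleteDatastoreFile_Task).
            cached_vmdk.parent.mkdir(parents=True, exist_ok=True)
            shutil.move(str(tmp_vmdk), str(cached_vmdk))
            shutil.rmtree(tmp_vmdk.parent)

        # 3. Every instance then gets its own copy of the cached disk
        #    (CopyVirtualDisk_Task in the log).
        instance_vmdk = DATASTORE / instance_uuid / f"{instance_uuid}.vmdk"
        instance_vmdk.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(cached_vmdk, instance_vmdk)
        return instance_vmdk

    if __name__ == "__main__":
        print(fetch_image_if_missing(
            "a1697b8d-76e7-4c48-8b43-4dec5af3b5df",
            "8185f9bc-48d5-4cb7-a48d-f744ff704868"))

Caching the disk once and copying it per instance also explains the lock traffic around the fetch: the "[datastore1] devstack-image-cache_base/a1697b8d-..." lock acquired earlier and released below serializes concurrent boots of the same image on the cache entry instead of letting each of them download the image again.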
[ 2182.327360] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Cleaning up location [datastore1] OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2182.327527] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_4114c1aa-98e6-4ed8-8af0-94fe7821a3c7 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2182.327772] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f516f048-ef98-498c-87b1-38e9fa25b9a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.334179] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2182.334179] env[62405]: value = "task-1948366" [ 2182.334179] env[62405]: _type = "Task" [ 2182.334179] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.341082] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.630015] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2182.630295] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9632e9e2-0032-44e5-882f-327779d3f1e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.638539] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2182.638539] env[62405]: value = "task-1948367" [ 2182.638539] env[62405]: _type = "Task" [ 2182.638539] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.647968] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948367, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.844164] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035156} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.844490] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2182.844702] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2182.845560] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk to [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2182.845560] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62a0b484-43d8-4611-889e-4e5af8b2e609 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.851757] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2182.851757] env[62405]: value = "task-1948368" [ 2182.851757] env[62405]: _type = "Task" [ 2182.851757] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.860609] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.149280] env[62405]: DEBUG oslo_vmware.api [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948367, 'name': PowerOnVM_Task, 'duration_secs': 0.377214} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.149654] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2183.149970] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2d163aaf-ae7e-407e-a64b-078c3ca31cdf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance 'b495f9e6-60c8-4509-a34f-2e7ed59b6d82' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2183.223346] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.223605] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.223850] env[62405]: INFO nova.compute.manager [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Attaching volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 to /dev/sdb [ 2183.258495] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67c513e-8e19-4ce2-b7de-211317ac451a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.266082] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb21b60-53fd-456c-bb3e-7e194a3249f5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.279982] env[62405]: DEBUG nova.virt.block_device [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating existing volume attachment record: 906909e8-56a5-4f38-a2fb-478888c02fad {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2183.361116] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.863314] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.365140] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.865239] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.926417] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.926748] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.926988] env[62405]: DEBUG nova.compute.manager [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Going to confirm migration 8 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2185.364812] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948368, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.510457} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.365136] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a1697b8d-76e7-4c48-8b43-4dec5af3b5df/a1697b8d-76e7-4c48-8b43-4dec5af3b5df.vmdk to [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2185.366054] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c38f85f-70f8-4fa2-83cd-d76db0baee8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.388402] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2185.388683] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cefa757b-2219-434d-bb91-ff5e8fe3519e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.410090] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2185.410090] env[62405]: value = "task-1948372" [ 2185.410090] env[62405]: _type = "Task" [ 2185.410090] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.418400] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948372, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.464887] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2185.465157] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2185.465395] env[62405]: DEBUG nova.network.neutron [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2185.465643] env[62405]: DEBUG nova.objects.instance [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'info_cache' on Instance uuid b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2185.920200] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.420879] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948372, 'name': ReconfigVM_Task, 'duration_secs': 0.83192} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.421484] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868/8185f9bc-48d5-4cb7-a48d-f744ff704868.vmdk or device None with type streamOptimized {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2186.421866] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-959bf019-1d17-4cc4-aa19-ee727e6b01d8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.428231] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2186.428231] env[62405]: value = "task-1948373" [ 2186.428231] env[62405]: _type = "Task" [ 2186.428231] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.436466] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948373, 'name': Rename_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.736374] env[62405]: DEBUG nova.network.neutron [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [{"id": "a7c7d269-027f-42d9-819a-e04ab445d816", "address": "fa:16:3e:c7:9c:e0", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7c7d269-02", "ovs_interfaceid": "a7c7d269-027f-42d9-819a-e04ab445d816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2186.938155] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948373, 'name': Rename_Task, 'duration_secs': 0.306455} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.938448] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2186.938700] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7eec94d-b675-4f7b-8ae4-afe5082b29b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.945263] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2186.945263] env[62405]: value = "task-1948374" [ 2186.945263] env[62405]: _type = "Task" [ 2186.945263] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.952857] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948374, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.238934] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-b495f9e6-60c8-4509-a34f-2e7ed59b6d82" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2187.239264] env[62405]: DEBUG nova.objects.instance [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'migration_context' on Instance uuid b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2187.456815] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948374, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.743116] env[62405]: DEBUG nova.objects.base [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2187.744309] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dcf49a-f537-44ab-b81e-6fd8af966ebc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.762988] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07c74f97-f09d-46cc-b34d-ab4220c1fad8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.769231] env[62405]: DEBUG oslo_vmware.api [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2187.769231] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aa86fb-e38c-9777-6b0c-1494333a73a2" [ 2187.769231] env[62405]: _type = "Task" [ 2187.769231] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.777705] env[62405]: DEBUG oslo_vmware.api [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aa86fb-e38c-9777-6b0c-1494333a73a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.957312] env[62405]: DEBUG oslo_vmware.api [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948374, 'name': PowerOnVM_Task, 'duration_secs': 0.882163} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.957736] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2188.068019] env[62405]: DEBUG nova.compute.manager [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2188.068019] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6609bbf6-15d2-4e5b-9ff6-b8cad3417816 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.282710] env[62405]: DEBUG oslo_vmware.api [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52aa86fb-e38c-9777-6b0c-1494333a73a2, 'name': SearchDatastore_Task, 'duration_secs': 0.0071} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.282710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.282710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.586599] env[62405]: DEBUG oslo_concurrency.lockutils [None req-d8ce5356-acdd-4172-a87e-1c7ff53976f4 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.796s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.869235] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d284f278-7e97-4f0f-abf0-65de517c16bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.883615] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90b3f60-cd0f-4b42-a8ae-88a247b6bd9b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.917303] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdb6108-53b0-4672-b68b-8dea9ac664b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.925785] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d153db8-bdaf-434d-b3c2-740d34725b9c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.941380] env[62405]: DEBUG nova.compute.provider_tree [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.447208] env[62405]: DEBUG nova.scheduler.client.report [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2190.333261] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Volume attach. Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2190.333509] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2190.334395] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e641416-abfa-4601-9f1e-7dcd1de721bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.351258] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766d0f1f-2f41-4ba0-aa0e-40b75e7dcaa2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.376047] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2190.376421] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb3a0ee7-5362-4b1a-91a9-57b0e77b11b0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.394847] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2190.394847] env[62405]: value = "task-1948376" [ 2190.394847] env[62405]: _type = "Task" [ 2190.394847] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.404873] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948376, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.456493] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.175s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2190.905959] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948376, 'name': ReconfigVM_Task, 'duration_secs': 0.355896} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.906294] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2190.910883] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46adaede-8b0d-45b5-ac44-54c6da1b4e8d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.925512] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2190.925512] env[62405]: value = "task-1948377" [ 2190.925512] env[62405]: _type = "Task" [ 2190.925512] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.935190] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948377, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.022760] env[62405]: INFO nova.scheduler.client.report [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted allocation for migration 2b817578-8342-441e-a433-69c477465cdd [ 2191.435604] env[62405]: DEBUG oslo_vmware.api [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948377, 'name': ReconfigVM_Task, 'duration_secs': 0.141632} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.435909] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2191.527972] env[62405]: DEBUG oslo_concurrency.lockutils [None req-22313895-1261-4f2f-abc6-e59a365668cf tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.601s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.095434] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2192.095709] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.095925] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2192.096129] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.096302] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.098362] env[62405]: INFO nova.compute.manager [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Terminating instance [ 2192.471347] env[62405]: DEBUG nova.objects.instance [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'flavor' on Instance uuid f4af587c-08d3-457e-a20d-a5ea8aad311f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2192.601544] env[62405]: DEBUG nova.compute.manager [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2192.601760] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2192.602672] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fac13c7-f351-462e-8caa-aa03546ba805 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.610854] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2192.611114] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e05e99df-f4f3-4204-8bff-4b3e2129656b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.616899] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2192.616899] env[62405]: value = "task-1948378" [ 2192.616899] env[62405]: _type = "Task" [ 2192.616899] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.625684] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.947910] env[62405]: INFO nova.compute.manager [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Rebuilding instance [ 2192.976031] env[62405]: DEBUG oslo_concurrency.lockutils [None req-87ce13f8-ee45-4168-9ae6-cbcfc5a883c4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.752s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.988283] env[62405]: DEBUG nova.compute.manager [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2192.989202] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a68aafb-f26d-4787-8e89-09706357a58e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.126724] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948378, 'name': PowerOffVM_Task, 'duration_secs': 0.224782} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.126986] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2193.127180] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2193.127424] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dd4540a-ecce-40f7-9da0-b34196085bea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.295659] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2193.295889] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2193.296084] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleting the datastore file [datastore1] b495f9e6-60c8-4509-a34f-2e7ed59b6d82 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2193.296346] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4423ee23-52cd-4f5c-be67-c336fd618f2c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.303354] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2193.303354] env[62405]: value = "task-1948380" [ 2193.303354] env[62405]: _type = "Task" [ 2193.303354] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.311343] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948380, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.813607] env[62405]: DEBUG oslo_vmware.api [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148013} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.813911] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2193.814043] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2193.814228] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2193.814401] env[62405]: INFO nova.compute.manager [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 2193.814643] env[62405]: DEBUG oslo.service.loopingcall [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2193.814837] env[62405]: DEBUG nova.compute.manager [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2193.814930] env[62405]: DEBUG nova.network.neutron [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2194.003855] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2194.004297] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-122478cb-c70a-43ed-a4b4-1807f5b08a2d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.013086] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2194.013086] env[62405]: value = "task-1948381" [ 2194.013086] env[62405]: _type = "Task" [ 2194.013086] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.022724] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.523219] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948381, 'name': PowerOffVM_Task, 'duration_secs': 0.175031} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.524425] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2194.527700] env[62405]: DEBUG nova.compute.manager [req-dade7c83-9a6e-49d0-815b-9139a2bd0e7f req-7acb2b1b-26a7-4f8d-950a-900321f82d1e service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Received event network-vif-deleted-a7c7d269-027f-42d9-819a-e04ab445d816 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2194.527891] env[62405]: INFO nova.compute.manager [req-dade7c83-9a6e-49d0-815b-9139a2bd0e7f req-7acb2b1b-26a7-4f8d-950a-900321f82d1e service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Neutron deleted interface a7c7d269-027f-42d9-819a-e04ab445d816; detaching it from the instance and deleting it from the info cache [ 2194.528077] env[62405]: DEBUG nova.network.neutron [req-dade7c83-9a6e-49d0-815b-9139a2bd0e7f req-7acb2b1b-26a7-4f8d-950a-900321f82d1e service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2194.583048] env[62405]: INFO nova.compute.manager [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Detaching volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 [ 2194.612363] env[62405]: INFO nova.virt.block_device [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Attempting to driver detach volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 from mountpoint /dev/sdb [ 2194.612840] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2194.612840] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2194.613642] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebfef1d-2bab-4562-ae83-38d956b91da1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.636443] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cbb774-246e-49d1-966c-89368481bc1a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.643014] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4093d3f5-1003-4ac0-883b-4adec6c79b4e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.663249] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ac15ac-0782-41c1-9f78-bb0b4de8dd0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.678043] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] The volume has not been displaced from its original location: [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2194.683258] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2194.683561] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b920ad4-77cc-4021-acba-354291acdcf5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.701176] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2194.701176] env[62405]: value = "task-1948382" [ 2194.701176] env[62405]: _type = "Task" [ 2194.701176] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.708722] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948382, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.992115] env[62405]: DEBUG nova.network.neutron [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.030513] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f2a2131-7812-4283-ba1d-bd944df366c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.044927] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5faaf4f-e878-44de-bd93-abf72b9ee0f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.085606] env[62405]: DEBUG nova.compute.manager [req-dade7c83-9a6e-49d0-815b-9139a2bd0e7f req-7acb2b1b-26a7-4f8d-950a-900321f82d1e service nova] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Detach interface failed, port_id=a7c7d269-027f-42d9-819a-e04ab445d816, reason: Instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2195.210212] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948382, 'name': ReconfigVM_Task, 'duration_secs': 0.184762} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.210495] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2195.215154] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-358493e8-49e0-4ad4-ada7-38961c86906d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.229547] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2195.229547] env[62405]: value = "task-1948383" [ 2195.229547] env[62405]: _type = "Task" [ 2195.229547] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2195.237602] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948383, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.495411] env[62405]: INFO nova.compute.manager [-] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Took 1.68 seconds to deallocate network for instance. [ 2195.740557] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948383, 'name': ReconfigVM_Task, 'duration_secs': 0.150634} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.740834] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2196.003047] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.003403] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.003403] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.029049] env[62405]: INFO nova.scheduler.client.report [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted allocations for instance b495f9e6-60c8-4509-a34f-2e7ed59b6d82 [ 2196.537505] env[62405]: DEBUG oslo_concurrency.lockutils [None req-1ca7c186-6d7b-40b0-a7f1-2e33d4c11265 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "b495f9e6-60c8-4509-a34f-2e7ed59b6d82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.442s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.786823] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2196.787103] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95918d65-92d7-4f50-9ec9-6df50c2c3342 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.794806] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2196.794806] env[62405]: value = "task-1948384" [ 2196.794806] env[62405]: _type = "Task" [ 2196.794806] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.801926] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.305621] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] VM already powered off {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2197.305898] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2197.306059] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2197.306814] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585e72bc-bb12-4ce7-bad3-ae054f6d2fb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.325341] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1684c780-bedf-463e-892c-9be5b4f21b43 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.331209] env[62405]: WARNING nova.virt.vmwareapi.driver [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 2197.331471] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 
tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2197.332208] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36f5521-0df6-4edd-baf1-03dc43dd49aa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.338542] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2197.338752] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9ab9341-85ce-458a-9ae4-62548a84d15c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.167102] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.167529] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.670188] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2199.193128] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.193386] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.195380] env[62405]: INFO nova.compute.claims [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2199.398928] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2199.399145] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2199.399367] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2199.399637] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6f150b1-4b1a-4418-9511-3e35a8259850 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.406306] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2199.406306] env[62405]: value = "task-1948386" [ 2199.406306] env[62405]: _type = "Task" [ 2199.406306] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.413718] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948386, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.916208] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132384} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.916571] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2199.916644] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2199.916800] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2200.260447] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084c6fa6-7658-425f-a2da-7ab9cc238747 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.267882] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b87e74-669d-42cf-a6c3-028df0986d95 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.296753] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b960ed48-2b21-41d8-812c-6e31b1990026 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.303926] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9c5100-5341-44af-b5e4-ffc465db1ab5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.316900] env[62405]: DEBUG nova.compute.provider_tree [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2200.421697] env[62405]: INFO nova.virt.block_device [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Booting with volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 at /dev/sdb [ 2200.454279] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae32b6e2-09cd-407d-8cc3-82dde7794273 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.463776] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4495c8f-d259-481d-889b-49ba2c5ae40e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.488735] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ed3f5f6-9f6f-47f5-af49-6c14cf55b4a7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.495813] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e76b5d1-205e-465c-925a-50a881dbd075 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.519048] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93db94c3-0b01-494c-97de-d2f51e72983b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.524709] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5026a2-42de-41cb-854c-657328bd06e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.537020] env[62405]: DEBUG nova.virt.block_device [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating existing volume attachment record: c7869a92-39aa-4a1e-840c-a53e6a29a503 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2200.836105] env[62405]: ERROR nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [req-825de704-c266-453c-917f-fda9340e7b6c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-825de704-c266-453c-917f-fda9340e7b6c"}]} [ 2200.851614] env[62405]: DEBUG nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2200.863436] env[62405]: DEBUG nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2200.863639] env[62405]: DEBUG nova.compute.provider_tree [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2200.872813] env[62405]: DEBUG nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2200.893043] env[62405]: DEBUG nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2200.941608] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f5c872-9eb6-4597-bc42-63dea30bcdea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.948872] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b90f98-b3e2-4aa8-961b-bb75697a7b77 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.977288] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c9c490-7a44-4b24-a45d-36601d693830 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.984300] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39abb06f-a671-4463-8055-37c6954bfaa1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.998087] env[62405]: DEBUG nova.compute.provider_tree [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2201.528556] env[62405]: DEBUG nova.scheduler.client.report [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 191 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2201.528817] env[62405]: DEBUG nova.compute.provider_tree [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 191 to 192 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2201.528995] env[62405]: DEBUG nova.compute.provider_tree [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2202.034057] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.840s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2202.034459] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2202.539204] env[62405]: DEBUG nova.compute.utils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2202.540649] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2202.540823] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2202.587186] env[62405]: DEBUG nova.policy [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9c18747ac7149dba0e1c0a8fc6c0b7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd9a1a4650b34e388c50c7575cf09a7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2202.641111] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2202.641380] env[62405]: DEBUG nova.virt.hardware [None 
req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2202.641542] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2202.641727] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2202.641874] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2202.642081] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2202.642247] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2202.642409] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2202.642575] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2202.642736] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2202.642918] env[62405]: DEBUG nova.virt.hardware [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2202.643800] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2a59f5-77d1-4495-a722-122185d71524 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.652123] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ddf861-2573-4dba-a1b5-1b39ab653704 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.665380] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:aa:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9dc5f509-f8ba-495b-8931-0591e98d462c', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2202.672380] env[62405]: DEBUG oslo.service.loopingcall [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2202.672681] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2202.672835] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c703cad5-0d3a-4b08-b6fb-3a138c32d7d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.692238] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2202.692238] env[62405]: value = "task-1948387" [ 2202.692238] env[62405]: _type = "Task" [ 2202.692238] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.699422] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948387, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.926202] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Successfully created port: 9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2203.044212] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2203.202265] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948387, 'name': CreateVM_Task, 'duration_secs': 0.437386} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.202437] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2203.203125] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.203296] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.203620] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2203.203874] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35b87d2-a2ba-40cf-838a-b0a0d259831a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.208882] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2203.208882] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c2586d-50a2-5646-d56a-483e6f6a56dc" [ 2203.208882] env[62405]: _type = "Task" [ 2203.208882] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.217412] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c2586d-50a2-5646-d56a-483e6f6a56dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.719163] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52c2586d-50a2-5646-d56a-483e6f6a56dc, 'name': SearchDatastore_Task, 'duration_secs': 0.012177} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.719808] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.719808] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2203.719926] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.720083] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.720230] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2203.720485] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a8f3a5b-3e08-401b-ba6e-549d399d54be {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.728268] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2203.728460] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2203.729166] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51506af5-b9bc-44d6-9184-edc9b9e82a6d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.734122] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2203.734122] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19435-bf1d-deaa-4672-e25b330f5586" [ 2203.734122] env[62405]: _type = "Task" [ 2203.734122] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.740945] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19435-bf1d-deaa-4672-e25b330f5586, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.053224] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2204.079081] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2204.079431] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2204.079542] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2204.079655] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2204.079800] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2204.080054] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2204.080272] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2204.080436] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2204.080601] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2204.080762] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2204.080933] env[62405]: DEBUG nova.virt.hardware [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2204.081801] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69e97ca-f71a-4001-a4b4-dfb289b10ba5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.089312] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1642ed-5eb7-49c1-81de-f754feb5b4bf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.245049] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e19435-bf1d-deaa-4672-e25b330f5586, 'name': SearchDatastore_Task, 'duration_secs': 0.008528} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.245207] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ac80d0-e5d7-49ae-9f5f-487b7c5d5067 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.250164] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2204.250164] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f" [ 2204.250164] env[62405]: _type = "Task" [ 2204.250164] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.257379] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.340462] env[62405]: DEBUG nova.compute.manager [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Received event network-vif-plugged-9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2204.340804] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] Acquiring lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.340929] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.341121] env[62405]: DEBUG oslo_concurrency.lockutils [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.341355] env[62405]: DEBUG nova.compute.manager [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] No waiting events found dispatching network-vif-plugged-9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2204.341498] env[62405]: WARNING nova.compute.manager [req-ee83bbf0-14a4-4a0b-b84d-5b39d01c0625 req-8e8ec3a2-6834-4228-be46-2059dce04d20 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Received unexpected event 
network-vif-plugged-9cd0704c-b882-4e84-a2fd-533974d3bbee for instance with vm_state building and task_state spawning. [ 2204.424280] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Successfully updated port: 9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2204.760617] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.927092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.927092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.927310] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2205.261932] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.457105] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2205.574855] env[62405]: DEBUG nova.network.neutron [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.762037] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.077662] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.077995] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Instance network_info: |[{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2206.078430] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:21:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cd0704c-b882-4e84-a2fd-533974d3bbee', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2206.085763] env[62405]: DEBUG oslo.service.loopingcall [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2206.085961] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2206.086193] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b1d5d9a-e7f9-4bcf-a2eb-38e0bad9927d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.107493] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2206.107493] env[62405]: value = "task-1948388" [ 2206.107493] env[62405]: _type = "Task" [ 2206.107493] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.114824] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948388, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.263079] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.364730] env[62405]: DEBUG nova.compute.manager [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Received event network-changed-9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2206.364730] env[62405]: DEBUG nova.compute.manager [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Refreshing instance network info cache due to event network-changed-9cd0704c-b882-4e84-a2fd-533974d3bbee. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2206.364866] env[62405]: DEBUG oslo_concurrency.lockutils [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.364978] env[62405]: DEBUG oslo_concurrency.lockutils [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.365142] env[62405]: DEBUG nova.network.neutron [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Refreshing network info cache for port 9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2206.618066] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948388, 'name': CreateVM_Task, 'duration_secs': 0.400089} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.618066] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2206.618302] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.618302] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.618608] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2206.618860] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-212ab1b6-4600-4dde-bb52-7a86308a4230 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.623295] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2206.623295] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0798e-2861-28d3-1427-bae9e704f4ac" [ 2206.623295] env[62405]: _type = "Task" [ 2206.623295] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.630530] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0798e-2861-28d3-1427-bae9e704f4ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.763829] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.064708] env[62405]: DEBUG nova.network.neutron [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updated VIF entry in instance network info cache for port 9cd0704c-b882-4e84-a2fd-533974d3bbee. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2207.065085] env[62405]: DEBUG nova.network.neutron [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.133473] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52f0798e-2861-28d3-1427-bae9e704f4ac, 'name': SearchDatastore_Task, 'duration_secs': 0.211246} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.133759] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.133983] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2207.134206] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2207.265584] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52fa3322-9070-e91b-30f6-5e4927551d7f, 'name': SearchDatastore_Task, 'duration_secs': 2.582458} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.266040] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.266119] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2207.266404] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2207.266593] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2207.266805] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c552469-c892-4a60-9ffd-c70ff7f1b7f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.268734] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd75cff8-9b9c-412d-b6a0-71ff4e219b76 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.275350] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2207.275350] env[62405]: value = "task-1948389" [ 2207.275350] env[62405]: _type = "Task" [ 2207.275350] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.278949] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2207.279140] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2207.280116] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aec500c2-4646-4c24-90a6-01ecfdb68429 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.284999] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948389, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.287856] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2207.287856] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52acbe89-e0ff-b798-cfcb-4044d9626323" [ 2207.287856] env[62405]: _type = "Task" [ 2207.287856] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.294817] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52acbe89-e0ff-b798-cfcb-4044d9626323, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.567955] env[62405]: DEBUG oslo_concurrency.lockutils [req-608e6488-56da-419a-bdc6-b4fda28dbbb6 req-57ec894d-6510-4f18-ba4f-ed30317b595d service nova] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.785527] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948389, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418115} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.785796] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2207.786240] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2207.786286] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-839c8332-8851-4826-b9c9-c900be14d723 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.797435] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52acbe89-e0ff-b798-cfcb-4044d9626323, 'name': SearchDatastore_Task, 'duration_secs': 0.008429} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.799065] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2207.799065] env[62405]: value = "task-1948390" [ 2207.799065] env[62405]: _type = "Task" [ 2207.799065] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.799271] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83870d08-a8c6-4e23-b021-5f77785e8680 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.809594] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948390, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.809888] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2207.809888] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52580b76-c52c-cb04-9ed4-ef9e99d54b92" [ 2207.809888] env[62405]: _type = "Task" [ 2207.809888] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.817509] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52580b76-c52c-cb04-9ed4-ef9e99d54b92, 'name': SearchDatastore_Task, 'duration_secs': 0.008852} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.817736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.818034] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2207.818234] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a91f41e-f1a1-4ee5-83ae-0ce25cf69e13 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.824349] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2207.824349] env[62405]: value = "task-1948391" [ 2207.824349] env[62405]: _type = "Task" [ 2207.824349] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.831520] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.311034] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071258} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.311592] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2208.311880] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d81cdc-85d6-4ee7-9094-68102e7493a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.333343] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2208.336413] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1305dd9-b91f-457c-a6a5-ea02bc6cdd51 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.354986] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.390037} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.356099] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2208.356312] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2208.356596] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2208.356596] env[62405]: value = "task-1948392" [ 2208.356596] env[62405]: _type = "Task" [ 2208.356596] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.356775] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb496a83-65de-44dd-a808-36c341369aa0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.365895] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948392, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.367052] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2208.367052] env[62405]: value = "task-1948393" [ 2208.367052] env[62405]: _type = "Task" [ 2208.367052] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.375146] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.868714] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948392, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.876432] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060167} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.876680] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2208.877420] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04894b26-8e77-429e-9b81-78ada0c9c742 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.899386] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2208.899748] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79d5a52b-40eb-482f-9155-0aee4c6ba398 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.918337] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2208.918337] env[62405]: value = "task-1948394" [ 2208.918337] env[62405]: _type = "Task" [ 2208.918337] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.925609] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.370285] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948392, 'name': ReconfigVM_Task, 'duration_secs': 0.540754} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.370666] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to attach disk [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f/f4af587c-08d3-457e-a20d-a5ea8aad311f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2209.371666] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'guest_format': None, 'device_name': '/dev/sda', 'encrypted': False, 'device_type': 'disk', 'size': 0, 'disk_bus': None, 'encryption_format': None, 'image_id': 'e6bba7a8-c2de-41dc-871a-3859bba5f4f9'}], 'ephemerals': [], 'block_device_mapping': [{'boot_index': None, 'delete_on_termination': False, 'guest_format': None, 'mount_device': '/dev/sdb', 'device_type': None, 'disk_bus': None, 'attachment_id': 'c7869a92-39aa-4a1e-840c-a53e6a29a503', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'}, 'volume_type': None}], 'swap': None} {{(pid=62405) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2209.371866] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2209.372065] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2209.372822] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c817b0c3-3954-48c2-8b92-89654bf5dcba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.387066] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2544e7b2-9792-47fe-8aec-8c3dffccee15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.410147] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2209.410394] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea732b1d-b07e-40f2-b24f-22062fd8ddbd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.431615] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948394, 'name': ReconfigVM_Task, 'duration_secs': 0.268451} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.432740] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2209.433390] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2209.433390] env[62405]: value = "task-1948395" [ 2209.433390] env[62405]: _type = "Task" [ 2209.433390] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.433595] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fc8cc5a-8015-4e11-a9a9-e3f76bcf1aed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.442178] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948395, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.443226] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2209.443226] env[62405]: value = "task-1948396" [ 2209.443226] env[62405]: _type = "Task" [ 2209.443226] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.449872] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948396, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.944440] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948395, 'name': ReconfigVM_Task, 'duration_secs': 0.282572} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.947270] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2209.951859] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35714d8e-bdef-407f-af70-aaef8bad0c30 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.966621] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948396, 'name': Rename_Task, 'duration_secs': 0.285726} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.967762] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2209.968077] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2209.968077] env[62405]: value = "task-1948397" [ 2209.968077] env[62405]: _type = "Task" [ 2209.968077] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.968257] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6628414-72e1-4a54-983e-aa36a7bce790 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.976998] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948397, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.978099] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2209.978099] env[62405]: value = "task-1948398" [ 2209.978099] env[62405]: _type = "Task" [ 2209.978099] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.985621] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948398, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.479775] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948397, 'name': ReconfigVM_Task, 'duration_secs': 0.141119} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.482592] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2210.483140] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5cd3225-7ff2-4ea1-9102-f6f4884a2027 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.489833] env[62405]: DEBUG oslo_vmware.api [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948398, 'name': PowerOnVM_Task, 'duration_secs': 0.432474} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.490842] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2210.491066] env[62405]: INFO nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Took 6.44 seconds to spawn the instance on the hypervisor. [ 2210.491253] env[62405]: DEBUG nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2210.491571] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2210.491571] env[62405]: value = "task-1948399" [ 2210.491571] env[62405]: _type = "Task" [ 2210.491571] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.492211] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a002fe-c21b-4e54-820c-1f6e36e3a4af {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.505032] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948399, 'name': Rename_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.004799] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948399, 'name': Rename_Task, 'duration_secs': 0.146762} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.008064] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2211.010468] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85289c0d-5f8f-4a54-92f8-1dfe388e6445 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.017613] env[62405]: INFO nova.compute.manager [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Took 11.84 seconds to build instance. [ 2211.020826] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2211.020826] env[62405]: value = "task-1948400" [ 2211.020826] env[62405]: _type = "Task" [ 2211.020826] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.031592] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948400, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.187167] env[62405]: DEBUG nova.compute.manager [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Received event network-changed-9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2211.187392] env[62405]: DEBUG nova.compute.manager [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Refreshing instance network info cache due to event network-changed-9cd0704c-b882-4e84-a2fd-533974d3bbee. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2211.187631] env[62405]: DEBUG oslo_concurrency.lockutils [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2211.187790] env[62405]: DEBUG oslo_concurrency.lockutils [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2211.188088] env[62405]: DEBUG nova.network.neutron [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Refreshing network info cache for port 9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2211.519891] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7f4e21c8-8bc2-4cf7-a776-69c783aa86fa tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.352s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.530046] env[62405]: DEBUG oslo_vmware.api [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948400, 'name': PowerOnVM_Task, 'duration_secs': 0.435968} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.530304] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2211.530511] env[62405]: DEBUG nova.compute.manager [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2211.531248] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa01c88-08f2-42c0-9c71-8444c555f8fe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.894022] env[62405]: DEBUG nova.network.neutron [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updated VIF entry in instance network info cache for port 9cd0704c-b882-4e84-a2fd-533974d3bbee. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2211.894410] env[62405]: DEBUG nova.network.neutron [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2212.048901] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2212.049167] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2212.049384] env[62405]: DEBUG nova.objects.instance [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62405) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2212.397145] env[62405]: DEBUG oslo_concurrency.lockutils [req-b0fc510c-cb0d-4fc8-b286-d0bea6ed76c9 req-6a9bfbc5-25b3-4280-947e-c05c9d18eddd service nova] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2213.059316] env[62405]: DEBUG oslo_concurrency.lockutils [None req-6095683f-a916-46f2-911a-7f6bf704c2a4 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2220.778110] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2220.778498] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2220.778498] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 2220.778627] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 2221.308877] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2221.309048] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2221.309215] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2221.309399] env[62405]: DEBUG nova.objects.instance [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lazy-loading 'info_cache' on Instance uuid 46b794f6-e858-45e6-9977-98ab246482f3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2223.028125] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": 
"nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2223.531253] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2223.531539] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 2223.531730] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.531892] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.532051] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.532201] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.532341] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.532511] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2223.532640] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 2223.532786] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2224.035604] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.036041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.036041] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.036217] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2224.037052] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd5dd2-3618-4e8c-a11c-4a8a895d30fa {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.045384] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac1031a-fd3d-4e2e-9074-90a30a37f653 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.058846] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b49487-4c80-402e-bd54-a76556bea874 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.064791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d405bc3d-3f32-48a2-9acc-92faed8a35ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.093534] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180511MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2224.093691] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2224.093855] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.322387] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.322670] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.322846] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.323029] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.323202] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.325381] env[62405]: INFO nova.compute.manager [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Terminating instance [ 2224.829238] env[62405]: DEBUG nova.compute.manager [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2224.829537] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2224.830412] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420c8374-fdda-48c2-b246-27a13320f004 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.838141] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2224.838370] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c1cb9ab-c13e-4f37-8474-46a63b349d15 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.844494] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2224.844494] env[62405]: value = "task-1948401" [ 2224.844494] env[62405]: _type = "Task" [ 2224.844494] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2224.852034] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.118952] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46b794f6-e858-45e6-9977-98ab246482f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2225.119219] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance f4af587c-08d3-457e-a20d-a5ea8aad311f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2225.119261] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 8185f9bc-48d5-4cb7-a48d-f744ff704868 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2225.119389] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 27928d2b-1ed5-4326-81e4-1dade794c6a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2225.119606] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2225.119750] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2225.172041] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7f9000-64fd-4268-bf51-42cd187cdc38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.179357] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2875c7-e363-4b00-a249-e13213528a04 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.208013] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e006d2ce-2324-49b3-952e-f4d4762685e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.215156] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8904ab-5a7a-4e96-8ea2-a5646e71c18e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.228607] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2225.354687] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948401, 'name': PowerOffVM_Task, 'duration_secs': 0.210853} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.354970] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2225.355170] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2225.355409] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa1f784a-fe78-473a-9f62-43d50f6b830f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.448224] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2225.448471] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2225.448626] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleting the datastore file [datastore1] 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2225.448894] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e123fe0-e71a-4ed9-ac81-c214a80a896a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.455457] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for the task: (returnval){ [ 2225.455457] env[62405]: value = "task-1948403" [ 2225.455457] env[62405]: _type = "Task" [ 2225.455457] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2225.462809] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948403, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2225.759128] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 192 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2225.759362] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 192 to 193 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2225.759539] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2225.964885] env[62405]: DEBUG oslo_vmware.api [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Task: {'id': task-1948403, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125302} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2225.965163] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2225.965353] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2225.965529] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2225.965714] env[62405]: INFO nova.compute.manager [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2225.965941] env[62405]: DEBUG oslo.service.loopingcall [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2225.966148] env[62405]: DEBUG nova.compute.manager [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2225.966243] env[62405]: DEBUG nova.network.neutron [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2226.265164] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2226.265501] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.171s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.427264] env[62405]: DEBUG nova.compute.manager [req-7e8d6fde-cf84-40b4-8d80-8fd5f2415f57 req-9bb904e8-7d23-47fa-a6d6-71ae77269688 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Received event network-vif-deleted-9ccf45be-5a2c-4a79-862c-d1b26508863f {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2226.427264] env[62405]: INFO nova.compute.manager [req-7e8d6fde-cf84-40b4-8d80-8fd5f2415f57 req-9bb904e8-7d23-47fa-a6d6-71ae77269688 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Neutron deleted interface 9ccf45be-5a2c-4a79-862c-d1b26508863f; detaching it from the instance and deleting it from the info cache [ 2226.427264] env[62405]: DEBUG nova.network.neutron [req-7e8d6fde-cf84-40b4-8d80-8fd5f2415f57 req-9bb904e8-7d23-47fa-a6d6-71ae77269688 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.907743] env[62405]: DEBUG nova.network.neutron [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2226.929995] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-557405e6-953c-47fe-bb17-99277e32e360 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.939192] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eeca7d-253d-4eb0-afad-52e0cb5403d3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.964152] env[62405]: DEBUG nova.compute.manager [req-7e8d6fde-cf84-40b4-8d80-8fd5f2415f57 req-9bb904e8-7d23-47fa-a6d6-71ae77269688 service nova] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Detach interface failed, port_id=9ccf45be-5a2c-4a79-862c-d1b26508863f, reason: Instance 8185f9bc-48d5-4cb7-a48d-f744ff704868 could not be found. 
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2227.410295] env[62405]: INFO nova.compute.manager [-] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Took 1.44 seconds to deallocate network for instance. [ 2227.916288] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2227.916554] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2227.916786] env[62405]: DEBUG nova.objects.instance [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lazy-loading 'resources' on Instance uuid 8185f9bc-48d5-4cb7-a48d-f744ff704868 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2228.506930] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42312535-6c37-4eba-9b54-dfdab3232672 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.515145] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233c65ac-7ffa-436e-a314-c4193cea3010 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.545402] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1623066f-4e66-4626-843a-27734040c056 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.553467] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b47bc6-6c6e-4c20-a125-b498f774fe82 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.567925] env[62405]: DEBUG nova.compute.provider_tree [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2229.071137] env[62405]: DEBUG nova.scheduler.client.report [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2229.576779] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.599260] env[62405]: INFO nova.scheduler.client.report [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Deleted allocations for instance 8185f9bc-48d5-4cb7-a48d-f744ff704868 [ 2230.109609] env[62405]: DEBUG oslo_concurrency.lockutils [None req-2b203a29-9572-4d90-a7e7-5a88fc90aab7 tempest-AttachVolumeShelveTestJSON-1446613190 tempest-AttachVolumeShelveTestJSON-1446613190-project-member] Lock "8185f9bc-48d5-4cb7-a48d-f744ff704868" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.787s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.883069] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.245604] env[62405]: DEBUG oslo_concurrency.lockutils [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.246032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.749534] env[62405]: INFO nova.compute.manager [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Detaching volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 [ 2247.779633] env[62405]: INFO nova.virt.block_device [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Attempting to driver detach volume 940b01a7-ec3e-472d-9a1f-d3374b7bb430 from mountpoint /dev/sdb [ 2247.779877] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Volume detach. 
Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2247.780079] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2247.780964] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53d1aa1-25fd-4f70-8ea6-f9a0df72f7e8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.801902] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d400d8f1-18b5-4f50-b458-edd5fc00cc8b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.808615] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908bbf11-f237-476c-b846-aacc50a9cd98 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.827868] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab013d5-edbe-4d2d-94cc-12e4ddfd3387 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.842865] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] The volume has not been displaced from its original location: [datastore1] volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430/volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430.vmdk. No consolidation needed. 
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2247.847910] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2247.848166] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4ed6252-f632-4c8e-991d-09fb1020590b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.866849] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2247.866849] env[62405]: value = "task-1948405" [ 2247.866849] env[62405]: _type = "Task" [ 2247.866849] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.874155] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.376509] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948405, 'name': ReconfigVM_Task, 'duration_secs': 0.197683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.376813] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2248.381314] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c95a3308-4455-4403-931f-51d9e0d5636c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.396948] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2248.396948] env[62405]: value = "task-1948406" [ 2248.396948] env[62405]: _type = "Task" [ 2248.396948] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2248.404470] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948406, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.907642] env[62405]: DEBUG oslo_vmware.api [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948406, 'name': ReconfigVM_Task, 'duration_secs': 0.130496} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.907865] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401615', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'name': 'volume-940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'f4af587c-08d3-457e-a20d-a5ea8aad311f', 'attached_at': '', 'detached_at': '', 'volume_id': '940b01a7-ec3e-472d-9a1f-d3374b7bb430', 'serial': '940b01a7-ec3e-472d-9a1f-d3374b7bb430'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2249.451314] env[62405]: DEBUG nova.objects.instance [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'flavor' on Instance uuid f4af587c-08d3-457e-a20d-a5ea8aad311f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2249.497759] env[62405]: DEBUG nova.compute.manager [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2250.017052] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.017325] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2250.463745] env[62405]: DEBUG oslo_concurrency.lockutils [None req-69661522-b086-4c35-82aa-a74f59d9ab57 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.218s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2250.522107] env[62405]: INFO nova.compute.claims [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2251.028154] env[62405]: INFO nova.compute.resource_tracker [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating resource usage from migration c84580de-adad-4523-9937-1b28501870d6 [ 2251.083302] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab46a9e-f450-4780-85c9-5f251adc93eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.090502] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0684f784-44bd-4fb1-87ed-81c74b2c052f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.119344] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055e98cb-b6f2-4c8d-a05c-24945e76f8dd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.126221] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71c919c-1335-4631-a6a6-a98a6e9af909 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.138754] env[62405]: DEBUG nova.compute.provider_tree [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2251.486856] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.487249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.487389] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.487535] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.487705] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.490177] env[62405]: INFO nova.compute.manager [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Terminating instance [ 2251.642317] env[62405]: DEBUG nova.scheduler.client.report [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2251.994310] env[62405]: DEBUG nova.compute.manager [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Start destroying the instance on the hypervisor. 
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2251.994569] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2251.995439] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104d00f5-ed20-4678-8998-e04803528f7d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.003537] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2252.003758] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c587e34b-82d2-41ea-8a47-6b69fd035acf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.009150] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2252.009150] env[62405]: value = "task-1948407" [ 2252.009150] env[62405]: _type = "Task" [ 2252.009150] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.016384] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948407, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.148032] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.130s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2252.148282] env[62405]: INFO nova.compute.manager [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Migrating [ 2252.518643] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948407, 'name': PowerOffVM_Task, 'duration_secs': 0.170131} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.518911] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2252.519012] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2252.519249] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdb34940-10f9-4912-97f1-c624a03b2b83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.617265] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2252.617504] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2252.617688] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] f4af587c-08d3-457e-a20d-a5ea8aad311f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2252.617954] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa700888-5c9e-4095-a1a1-8b08514ba211 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.624164] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2252.624164] env[62405]: value = "task-1948409" [ 2252.624164] env[62405]: _type = "Task" [ 2252.624164] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.632591] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.666321] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2252.666509] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2252.666657] env[62405]: DEBUG nova.network.neutron [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2253.134168] env[62405]: DEBUG oslo_vmware.api [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135585} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.134429] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2253.134600] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2253.134779] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2253.134959] env[62405]: INFO nova.compute.manager [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2253.135223] env[62405]: DEBUG oslo.service.loopingcall [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
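Note: the "Waiting for function ... _deallocate_network_with_retries to return" entry above shows the Neutron teardown being wrapped in a retry loop driven by oslo.service's looping-call machinery. As a rough illustration only (plain Python, not the oslo.service API), a bounded retry with exponential backoff looks like the sketch below; the deallocate call in the usage comment and the retry parameters are assumptions.

    import time

    def call_with_retries(func, *, attempts=3, delay=1.0, backoff=2.0,
                          retry_on=(ConnectionError, TimeoutError)):
        """Call func(), retrying transient failures with exponential backoff.

        A simplified stand-in for the behaviour suggested by the
        '_deallocate_network_with_retries' log entry; not Nova's code.
        """
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except retry_on as exc:
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
                time.sleep(delay)
                delay *= backoff

    # Hypothetical usage: retry the Neutron deallocation call for an instance.
    # call_with_retries(lambda: neutron_api.deallocate_for_instance(ctxt, instance))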
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2253.135437] env[62405]: DEBUG nova.compute.manager [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2253.135535] env[62405]: DEBUG nova.network.neutron [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2253.377516] env[62405]: DEBUG nova.network.neutron [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2253.585205] env[62405]: DEBUG nova.compute.manager [req-fd313a87-b09e-4570-bc8e-09c886649203 req-2ac3610e-c6c7-421e-85ad-74e63a0f2f0d service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Received event network-vif-deleted-9dc5f509-f8ba-495b-8931-0591e98d462c {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2253.585474] env[62405]: INFO nova.compute.manager [req-fd313a87-b09e-4570-bc8e-09c886649203 req-2ac3610e-c6c7-421e-85ad-74e63a0f2f0d service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Neutron deleted interface 9dc5f509-f8ba-495b-8931-0591e98d462c; detaching it from the instance and deleting it from the info cache [ 2253.585586] env[62405]: DEBUG nova.network.neutron [req-fd313a87-b09e-4570-bc8e-09c886649203 req-2ac3610e-c6c7-421e-85ad-74e63a0f2f0d service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2253.880567] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2254.065239] env[62405]: DEBUG nova.network.neutron [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2254.088368] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39416a4c-e31c-44ca-bcca-46f78501b043 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.100884] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fda9dd-de20-448d-bae8-ef30e63e9218 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.134563] env[62405]: DEBUG nova.compute.manager [req-fd313a87-b09e-4570-bc8e-09c886649203 req-2ac3610e-c6c7-421e-85ad-74e63a0f2f0d service nova] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Detach interface failed, port_id=9dc5f509-f8ba-495b-8931-0591e98d462c, reason: Instance f4af587c-08d3-457e-a20d-a5ea8aad311f could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2254.568025] env[62405]: INFO nova.compute.manager [-] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Took 1.43 seconds to deallocate network for instance. [ 2255.074220] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.074516] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.074699] env[62405]: DEBUG nova.objects.instance [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'resources' on Instance uuid f4af587c-08d3-457e-a20d-a5ea8aad311f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2255.395600] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cb00d8-a4ce-4597-8c1e-720010f8e52a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.413717] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2255.638576] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb029fe-3d7b-4b7c-b60a-9276a4556308 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.645867] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b1d07a-2d38-4308-9eb6-a50eccf7f609 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.676885] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb7b685-d129-4082-b95e-49163081cdf5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.684130] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd43069b-72d4-4c42-9920-aabf0968c835 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.697112] env[62405]: DEBUG nova.compute.provider_tree [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2255.919513] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2255.919755] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-866e0973-4c1a-4e21-9d48-e83972846229 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.927344] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2255.927344] env[62405]: value = "task-1948410" [ 2255.927344] env[62405]: _type = "Task" [ 2255.927344] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.935305] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948410, 'name': PowerOffVM_Task} progress is 0%. 
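Note: the lockutils lines in this run ('Lock "compute_resources" acquired ... waited 0.000s', 'released ... held 2.130s') show the resource tracker serialising claims and usage updates on a named lock and reporting how long each caller waited and held it. The sketch below reproduces only that timing pattern with a plain threading.Lock; it is not oslo.concurrency, and the lock and owner names in the usage comment are copied from the log for illustration.

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}

    @contextmanager
    def timed_lock(name: str, owner: str):
        """Acquire a named lock and report waited/held times, mirroring the
        lockutils DEBUG lines above (simplified; not oslo.concurrency)."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" released by "{owner}" :: held {held:.3f}s')

    # Hypothetical usage mirroring the resource tracker pattern seen above:
    # with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    #     ...update usage...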
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.200558] env[62405]: DEBUG nova.scheduler.client.report [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2256.436819] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948410, 'name': PowerOffVM_Task, 'duration_secs': 0.155614} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2256.437088] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2256.437273] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2256.705518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.723236] env[62405]: INFO nova.scheduler.client.report [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocations for instance f4af587c-08d3-457e-a20d-a5ea8aad311f [ 2256.943129] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow 
threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2256.943376] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2256.943492] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2256.943677] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2256.943824] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2256.943977] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2256.944206] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2256.944371] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2256.944537] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2256.944700] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2256.944874] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2256.949952] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-f57cf6df-0ed2-4b07-890b-9b11734f34fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.967064] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2256.967064] env[62405]: value = "task-1948411" [ 2256.967064] env[62405]: _type = "Task" [ 2256.967064] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.975426] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.231977] env[62405]: DEBUG oslo_concurrency.lockutils [None req-153f1e45-2de3-4aff-8c4e-b20b69281709 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "f4af587c-08d3-457e-a20d-a5ea8aad311f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.745s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.476550] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948411, 'name': ReconfigVM_Task, 'duration_secs': 0.157985} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.476857] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2257.985046] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2257.985445] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2257.985723] env[62405]: DEBUG nova.virt.hardware [None 
req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2257.986059] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2257.986339] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2257.986581] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2257.986810] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2257.986977] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2257.987185] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2257.987359] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2257.987537] env[62405]: DEBUG nova.virt.hardware [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2257.992886] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2257.993200] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669e92c5-cd74-4b37-8a8b-a8ff582d8087 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.012250] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2258.012250] env[62405]: value = "task-1948412" [ 2258.012250] env[62405]: _type = "Task" [ 2258.012250] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.019904] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948412, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.522051] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948412, 'name': ReconfigVM_Task, 'duration_secs': 0.151088} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.522358] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2258.523136] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd17172-2a71-4237-a86a-21ff0dfc6309 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.545408] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2258.545968] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39de893d-a338-41d4-8a56-16467e9c07d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.563533] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2258.563533] env[62405]: value = "task-1948413" [ 2258.563533] env[62405]: _type = "Task" [ 2258.563533] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.570714] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948413, 'name': ReconfigVM_Task} progress is 5%. 
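Note: the nova.virt.hardware sequence above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerates sockets/cores/threads factorisations of the flavor's vCPU count and filters them against flavor and image limits. The helper below reproduces just the enumeration step in isolation; it is a simplified sketch, not nova.virt.hardware, and the 65536 defaults mirror the limit values printed in the log.

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate sockets*cores*threads factorisations of vcpus within limits.

        Illustrates the idea behind the 'Got N possible topologies' DEBUG lines;
        not Nova's _get_possible_cpu_topologies.
        """
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_topologies(4))   # six factorisations, e.g. 1x1x4, 1x2x2, ..., 4x1x1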
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.073508] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948413, 'name': ReconfigVM_Task, 'duration_secs': 0.235006} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.073787] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2259.074070] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2259.280854] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "3e67c74a-1879-4e74-afad-cd7446f284b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.281119] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.580074] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be8b61-f97e-4f05-88d8-8f79fc2eb7ba {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.598747] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149f71d1-28d4-42a8-921b-23c7d0466da4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.615698] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2259.783852] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] 
[instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Starting instance... {{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2260.168218] env[62405]: DEBUG nova.network.neutron [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Port 9cd0704c-b882-4e84-a2fd-533974d3bbee binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2260.305902] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.306255] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.307685] env[62405]: INFO nova.compute.claims [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2261.186540] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.187049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.187049] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.372054] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084bbae5-2a75-4def-99fd-9b052af43b28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.379530] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f25be9ab-eba7-4a0a-b894-efedc792e0c4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.408277] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db58e839-9943-41e2-92fa-151e5e117cf2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.415473] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004c822-bdae-4f8f-a61e-a8179aa06837 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.429018] env[62405]: DEBUG nova.compute.provider_tree [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2261.950831] env[62405]: ERROR nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [req-149cae0e-91e8-456a-ad3e-0ef4346c3ada] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-149cae0e-91e8-456a-ad3e-0ef4346c3ada"}]} [ 2261.967982] env[62405]: DEBUG nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2261.980998] env[62405]: DEBUG nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2261.981238] env[62405]: DEBUG nova.compute.provider_tree [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2261.991371] env[62405]: DEBUG nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2262.008194] env[62405]: DEBUG nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2262.056355] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b49086-fed3-465e-b4b7-f1e45d55491b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.063604] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816dc6d6-6d1a-4b72-855b-63185d7a3f6d 
{{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.091770] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565f5ca5-7930-4d36-a465-4d3f966f657a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.098364] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98940dc2-ef53-455e-9cd5-d93e8304c11a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.110375] env[62405]: DEBUG nova.compute.provider_tree [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2262.224990] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2262.225280] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2262.225402] env[62405]: DEBUG nova.network.neutron [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2262.638673] env[62405]: DEBUG nova.scheduler.client.report [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 196 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2262.638935] env[62405]: DEBUG nova.compute.provider_tree [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating resource 
provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 196 to 197 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2262.639130] env[62405]: DEBUG nova.compute.provider_tree [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2262.940367] env[62405]: DEBUG nova.network.neutron [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.144340] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.838s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2263.144798] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Start building networks asynchronously for instance. 
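Note: the ERROR/DEBUG exchange above is a routine resource-provider generation conflict. The first inventory PUT is rejected with 409 placement.concurrent_update, the report client refreshes inventories, aggregates and traits, re-reads the provider generation and re-submits, after which the provider moves from generation 196 to 197. Below is a rough client-side sketch of that retry loop against the Placement API; the base URL, headers and retry count are assumptions rather than values from this deployment, and this is not the nova.scheduler.client.report code.

    import requests

    def put_inventories(base_url, headers, provider_uuid, inventories, max_retries=3):
        """PUT the full inventory for a resource provider, retrying on a
        generation conflict (HTTP 409, code 'placement.concurrent_update').

        Simplified sketch of the behaviour shown in the log above.
        """
        url = f"{base_url}/resource_providers/{provider_uuid}/inventories"
        for _ in range(max_retries):
            # Re-read the provider's current generation before each attempt.
            current = requests.get(url, headers=headers).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=headers)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409: another writer bumped the generation; loop and retry.
        raise RuntimeError("inventory update kept conflicting; giving up")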
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2263.443482] env[62405]: DEBUG oslo_concurrency.lockutils [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2263.648902] env[62405]: DEBUG nova.compute.utils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2263.650335] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2263.650509] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2263.690218] env[62405]: DEBUG nova.policy [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4ac1534df994c18bad62ec85acbc69f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a423f493034065bb1591d14d215ed8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2263.976325] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6181b0d4-fd90-42fd-8ce6-7d41fa26c591 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.995079] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Successfully created port: 8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2263.997554] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942f176d-2de5-492f-9c25-1ddf0016c0b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.004974] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 83 {{(pid=62405) _update_instance_progress 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2264.153348] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2264.510817] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2264.511170] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00597a2f-2f12-4e84-9d6a-a8d48dd6ecd7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.518525] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2264.518525] env[62405]: value = "task-1948414" [ 2264.518525] env[62405]: _type = "Task" [ 2264.518525] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.526555] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.028787] env[62405]: DEBUG oslo_vmware.api [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948414, 'name': PowerOnVM_Task, 'duration_secs': 0.493999} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.029200] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2265.029498] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-8adcc4f8-5314-4b52-807d-3e13f1ad45e9 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance '27928d2b-1ed5-4326-81e4-1dade794c6a7' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2265.162707] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Start spawning the instance on the hypervisor. 
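Note: the update_instance_cache_with_nw_info entries carry the full network_info structure for an instance (VIF id, MAC address, bridge, subnets with fixed and floating IPs). When digging through these logs it can help to pull just the addresses out of that structure; the helper below walks the list-of-VIF layout exactly as logged above. It is a log-reading convenience written for this note, not a Nova API, and the sample data is copied from the cache entry for instance 27928d2b-1ed5-4326-81e4-1dade794c6a7.

    def extract_addresses(network_info):
        """Yield (vif_id, mac, fixed_ip, floating_ips) tuples from a Nova
        network_info list as logged by update_instance_cache_with_nw_info."""
        for vif in network_info:
            mac = vif.get("address")
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    floating = [f["address"] for f in ip.get("floating_ips", [])]
                    yield vif["id"], mac, ip["address"], floating

    nw_info = [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee",
                "address": "fa:16:3e:7b:21:33",
                "network": {"subnets": [{"ips": [{"address": "192.168.128.11",
                                                  "floating_ips": [{"address": "10.180.180.241"}]}]}]}}]
    print(list(extract_addresses(nw_info)))
    # [('9cd0704c-b882-4e84-a2fd-533974d3bbee', 'fa:16:3e:7b:21:33', '192.168.128.11', ['10.180.180.241'])]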
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2265.188577] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2265.188818] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2265.188977] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2265.189214] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2265.189374] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2265.189525] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2265.189736] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2265.189899] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2265.190081] env[62405]: DEBUG 
nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2265.190253] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2265.190427] env[62405]: DEBUG nova.virt.hardware [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2265.191279] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799b8867-b5c4-469f-a870-61600cc9fbb0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.200237] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e8f808-d442-4e8d-be03-749fba63fbf5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.415486] env[62405]: DEBUG nova.compute.manager [req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Received event network-vif-plugged-8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2265.415762] env[62405]: DEBUG oslo_concurrency.lockutils [req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] Acquiring lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.415918] env[62405]: DEBUG oslo_concurrency.lockutils [req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.416101] env[62405]: DEBUG oslo_concurrency.lockutils [req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.416273] env[62405]: DEBUG nova.compute.manager [req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] No waiting events found dispatching network-vif-plugged-8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2265.416433] env[62405]: WARNING nova.compute.manager 
[req-44e555cf-1261-42b8-ac1c-1ea522fb2c4b req-9d3f4661-0aab-4715-b763-50ef2c631922 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Received unexpected event network-vif-plugged-8e59c1d7-8667-400c-b30b-9d6aeaec3422 for instance with vm_state building and task_state spawning. [ 2265.495973] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Successfully updated port: 8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2266.000533] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2266.000784] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2266.000838] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2266.533188] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2266.673887] env[62405]: DEBUG nova.network.neutron [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updating instance_info_cache with network_info: [{"id": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "address": "fa:16:3e:fc:a4:2a", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e59c1d7-86", "ovs_interfaceid": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.176627] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2267.177034] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Instance network_info: |[{"id": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "address": "fa:16:3e:fc:a4:2a", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e59c1d7-86", "ovs_interfaceid": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2267.177523] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:a4:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e59c1d7-8667-400c-b30b-9d6aeaec3422', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2267.185759] env[62405]: DEBUG oslo.service.loopingcall [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2267.186035] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2267.186316] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fa86ada-9a08-47ab-8f42-793cc5faf486 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.208576] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2267.208576] env[62405]: value = "task-1948415" [ 2267.208576] env[62405]: _type = "Task" [ 2267.208576] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.216282] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.439228] env[62405]: DEBUG nova.compute.manager [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Received event network-changed-8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2267.439396] env[62405]: DEBUG nova.compute.manager [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Refreshing instance network info cache due to event network-changed-8e59c1d7-8667-400c-b30b-9d6aeaec3422. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2267.439612] env[62405]: DEBUG oslo_concurrency.lockutils [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] Acquiring lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.439758] env[62405]: DEBUG oslo_concurrency.lockutils [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] Acquired lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.439926] env[62405]: DEBUG nova.network.neutron [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Refreshing network info cache for port 8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2267.718430] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.865221] env[62405]: DEBUG nova.network.neutron [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Port 9cd0704c-b882-4e84-a2fd-533974d3bbee binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2267.865483] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2267.865634] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.865795] env[62405]: DEBUG nova.network.neutron [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2268.118788] env[62405]: DEBUG nova.network.neutron [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updated VIF entry in instance network info cache for port 8e59c1d7-8667-400c-b30b-9d6aeaec3422. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2268.119177] env[62405]: DEBUG nova.network.neutron [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updating instance_info_cache with network_info: [{"id": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "address": "fa:16:3e:fc:a4:2a", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e59c1d7-86", "ovs_interfaceid": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.218988] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.621341] env[62405]: DEBUG oslo_concurrency.lockutils [req-2ad66b3f-17bb-4c72-a91a-17c04036ad3e req-43907ddc-f050-4ca8-a2f2-ebdfa5ebcbe0 service nova] Releasing lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2268.638660] env[62405]: DEBUG nova.network.neutron [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.719150] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task} progress is 25%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.142082] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2269.219085] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task} progress is 99%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.645965] env[62405]: DEBUG nova.compute.manager [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62405) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2269.646231] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2269.646463] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2269.720698] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948415, 'name': CreateVM_Task, 'duration_secs': 2.03625} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.720864] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2269.721570] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.721736] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.722096] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2269.722587] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d858198-cd79-4216-a00d-5e61fc05bcf0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.726874] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2269.726874] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a789c-46f1-3367-5228-fc49cd7f356c" [ 2269.726874] env[62405]: _type = "Task" [ 2269.726874] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.733908] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a789c-46f1-3367-5228-fc49cd7f356c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.149814] env[62405]: DEBUG nova.objects.instance [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'migration_context' on Instance uuid 27928d2b-1ed5-4326-81e4-1dade794c6a7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2270.237233] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]522a789c-46f1-3367-5228-fc49cd7f356c, 'name': SearchDatastore_Task, 'duration_secs': 0.010728} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.237559] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.237733] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2270.237959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.238121] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.238298] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2270.238568] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-656b3ba9-2705-4a93-b889-c53224b29a86 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.246634] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2270.246797] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2270.247441] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c00f823a-c73c-4a79-ab0a-420f6092fef5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.252066] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2270.252066] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5209ba76-00c0-6021-669c-d6f160304309" [ 2270.252066] env[62405]: _type = "Task" [ 2270.252066] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.258670] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5209ba76-00c0-6021-669c-d6f160304309, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.719871] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760789bc-c8fb-4701-b733-998fe1301a2a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.727738] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153d4c2f-b394-47d2-961b-e183765a9204 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.760762] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0352082e-1ac7-474b-b50e-29aafdd894d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.768471] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5209ba76-00c0-6021-669c-d6f160304309, 'name': SearchDatastore_Task, 'duration_secs': 0.010441} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.770915] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42d9fb49-33aa-498e-b9e8-c304fc3a2841 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.773849] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7759280-8c25-4209-b677-241d8e057782 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.780419] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2270.780419] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528715df-9a38-62f9-1172-f3209adaaf9f" [ 2270.780419] env[62405]: _type = "Task" [ 2270.780419] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.787967] env[62405]: DEBUG nova.compute.provider_tree [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2270.796906] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]528715df-9a38-62f9-1172-f3209adaaf9f, 'name': SearchDatastore_Task, 'duration_secs': 0.01005} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2270.797710] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.797953] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3e67c74a-1879-4e74-afad-cd7446f284b3/3e67c74a-1879-4e74-afad-cd7446f284b3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2270.798212] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7f0da8e-d667-4cf0-8a25-c655f278ca16 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.804818] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2270.804818] env[62405]: value = "task-1948416" [ 2270.804818] env[62405]: _type = "Task" [ 2270.804818] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.812268] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.294131] env[62405]: DEBUG nova.scheduler.client.report [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2271.314849] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.415363} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.315112] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 3e67c74a-1879-4e74-afad-cd7446f284b3/3e67c74a-1879-4e74-afad-cd7446f284b3.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2271.315329] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2271.315572] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f53b1a1b-f09e-40ad-8a61-c881025d1b8a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.322186] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2271.322186] env[62405]: value = "task-1948417" [ 2271.322186] env[62405]: _type = "Task" [ 2271.322186] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.330803] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948417, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.831866] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948417, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087307} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.832157] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2271.832864] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d9b68c-23e2-4d8b-8548-47e3d3f71b28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.854205] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 3e67c74a-1879-4e74-afad-cd7446f284b3/3e67c74a-1879-4e74-afad-cd7446f284b3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2271.854451] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e54aced6-8efa-4d06-837e-209f8e444483 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2271.872892] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2271.872892] env[62405]: value = "task-1948418" [ 2271.872892] env[62405]: _type = "Task" [ 2271.872892] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2271.882313] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948418, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.305062] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.658s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2272.382873] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948418, 'name': ReconfigVM_Task, 'duration_secs': 0.313465} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.383161] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 3e67c74a-1879-4e74-afad-cd7446f284b3/3e67c74a-1879-4e74-afad-cd7446f284b3.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2272.383767] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00bc2ee5-84c5-44c2-9df7-67727e5c4dc4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.390514] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2272.390514] env[62405]: value = "task-1948419" [ 2272.390514] env[62405]: _type = "Task" [ 2272.390514] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.401009] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.404900] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948419, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2272.900601] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948419, 'name': Rename_Task, 'duration_secs': 0.151137} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2272.900894] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2272.901160] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69ce4742-f40b-4ded-bd07-fbb5d93c9275 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2272.906645] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2272.906645] env[62405]: value = "task-1948420" [ 2272.906645] env[62405]: _type = "Task" [ 2272.906645] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2272.917669] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948420, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2273.422680] env[62405]: DEBUG oslo_vmware.api [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948420, 'name': PowerOnVM_Task, 'duration_secs': 0.461683} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2273.423099] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2273.423422] env[62405]: INFO nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Took 8.26 seconds to spawn the instance on the hypervisor. [ 2273.423720] env[62405]: DEBUG nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2273.424845] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c44544-7e40-4196-93dd-2086522ad3b6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.840422] env[62405]: INFO nova.compute.manager [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Swapping old allocation on dict_keys(['7d5eded7-a501-4fa6-b1d3-60e273d555d7']) held by migration c84580de-adad-4523-9937-1b28501870d6 for instance [ 2273.860978] env[62405]: DEBUG nova.scheduler.client.report [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Overwriting current allocation {'allocations': {'7d5eded7-a501-4fa6-b1d3-60e273d555d7': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 197}}, 'project_id': 'dd9a1a4650b34e388c50c7575cf09a7c', 'user_id': 'f9c18747ac7149dba0e1c0a8fc6c0b7e', 'consumer_generation': 1} on consumer 27928d2b-1ed5-4326-81e4-1dade794c6a7 {{(pid=62405) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 2273.932424] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} 
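Every vSphere operation in this trace follows the same shape: oslo.vmware invokes a *_Task method (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...), receives a task handle, and wait_for_task then polls it, logging "progress is N%" until a poll reports "completed successfully" together with duration_secs. The sketch below reproduces just that polling loop in isolation as a minimal illustration; FakeTask and its info() snapshots are hypothetical stand-ins for the PropertyCollector reads oslo.vmware actually performs, not its real API.

```python
import time


class FakeTask:
    """Hypothetical stand-in for a vSphere task handle; each call to info()
    returns the next snapshot a PropertyCollector read might have produced."""

    def __init__(self, snapshots):
        self._snapshots = iter(snapshots)   # e.g. [0, 25, 25, 99, "success"]

    def info(self):
        state = next(self._snapshots)
        if state == "success":
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": state}


def wait_for_task(task, poll_interval=0.5):
    """Block until the task succeeds, emitting the same progress/duration
    breadcrumbs that _poll_task writes in the log above."""
    start = time.monotonic()
    while True:
        info = task.info()
        if info["state"] == "success":
            print(f"completed successfully, duration_secs={time.monotonic() - start:.3f}")
            return info
        print(f"progress is {info['progress']}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Roughly what task-1948415 (CreateVM_Task) looked like above: several
    # polls stuck at 25%, one at 99%, then success after about two seconds.
    wait_for_task(FakeTask([0, 25, 25, 25, 99, "success"]), poll_interval=0.3)
```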
[ 2273.932611] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2273.932786] env[62405]: DEBUG nova.network.neutron [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2273.942190] env[62405]: INFO nova.compute.manager [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Took 13.65 seconds to build instance. [ 2274.401745] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.444090] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b71aea7b-7ed8-4439-bf45-aa6e3fc60853 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.163s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.690296] env[62405]: DEBUG nova.compute.manager [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Received event network-changed-8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2274.690489] env[62405]: DEBUG nova.compute.manager [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Refreshing instance network info cache due to event network-changed-8e59c1d7-8667-400c-b30b-9d6aeaec3422. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2274.690701] env[62405]: DEBUG oslo_concurrency.lockutils [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] Acquiring lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2274.690851] env[62405]: DEBUG oslo_concurrency.lockutils [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] Acquired lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2274.691009] env[62405]: DEBUG nova.network.neutron [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Refreshing network info cache for port 8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2274.753731] env[62405]: DEBUG nova.network.neutron [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [{"id": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "address": "fa:16:3e:7b:21:33", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cd0704c-b8", "ovs_interfaceid": "9cd0704c-b882-4e84-a2fd-533974d3bbee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2275.257073] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-27928d2b-1ed5-4326-81e4-1dade794c6a7" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2275.257516] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2275.257795] env[62405]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0182337d-1d16-4052-81db-bef98665c3ed {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.264932] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2275.264932] env[62405]: value = "task-1948421" [ 2275.264932] env[62405]: _type = "Task" [ 2275.264932] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.272598] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.401760] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.431611] env[62405]: DEBUG nova.network.neutron [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updated VIF entry in instance network info cache for port 8e59c1d7-8667-400c-b30b-9d6aeaec3422. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2275.431977] env[62405]: DEBUG nova.network.neutron [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updating instance_info_cache with network_info: [{"id": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "address": "fa:16:3e:fc:a4:2a", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e59c1d7-86", "ovs_interfaceid": "8e59c1d7-8667-400c-b30b-9d6aeaec3422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2275.776547] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948421, 'name': PowerOffVM_Task, 'duration_secs': 0.189384} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2275.776547] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2275.776547] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2275.776547] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2275.776547] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2275.776928] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2275.777102] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2275.777367] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2275.777703] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2275.777980] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 
tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2275.778292] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2275.778564] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2275.778846] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2275.783657] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e1ccf4-e6b4-4244-a550-695225443279 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.798055] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2275.798055] env[62405]: value = "task-1948423" [ 2275.798055] env[62405]: _type = "Task" [ 2275.798055] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2275.805286] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2275.935277] env[62405]: DEBUG oslo_concurrency.lockutils [req-403749e2-4139-4188-b357-40dd90e01f59 req-09b5ec1d-591c-4b6e-beb7-b4e1cb4a2ffc service nova] Releasing lock "refresh_cache-3e67c74a-1879-4e74-afad-cd7446f284b3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2276.308176] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948423, 'name': ReconfigVM_Task, 'duration_secs': 0.132256} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.308960] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a2182d-4307-4464-9909-add4fb34ffbf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.326924] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2276.327181] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2276.327350] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2276.327560] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2276.327713] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2276.327865] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2276.328088] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2276.328260] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2276.328426] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2276.328590] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2276.328764] env[62405]: DEBUG nova.virt.hardware [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2276.329547] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-013661cf-6c69-45e5-80f3-0b3b032dcb55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.335342] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2276.335342] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5283a032-c1e0-267d-cb35-0edc9712bc97" [ 2276.335342] env[62405]: _type = "Task" [ 2276.335342] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.344023] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5283a032-c1e0-267d-cb35-0edc9712bc97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2276.400758] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2276.400983] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2276.845330] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]5283a032-c1e0-267d-cb35-0edc9712bc97, 'name': SearchDatastore_Task, 'duration_secs': 0.011208} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2276.850475] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2276.850835] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d80b67c7-0bca-4a15-bd7a-109bb05569eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.868143] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2276.868143] env[62405]: value = "task-1948424" [ 2276.868143] env[62405]: _type = "Task" [ 2276.868143] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2276.875598] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.377934] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948424, 'name': ReconfigVM_Task, 'duration_secs': 0.249057} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.378230] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2277.379010] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcef0bc5-e666-4e28-a907-c830b6e989f7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.400576] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2277.400834] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22bc9d51-c8f2-428e-8dc6-cc189848024b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.419059] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2277.419059] env[62405]: value = "task-1948425" [ 2277.419059] env[62405]: _type = "Task" [ 2277.419059] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2277.427733] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948425, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2277.928740] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948425, 'name': ReconfigVM_Task, 'duration_secs': 0.261792} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2277.929060] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7/27928d2b-1ed5-4326-81e4-1dade794c6a7.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2277.929788] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add9ab54-348d-4897-b373-27e302c341ce {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.948799] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bec335-9b2b-4455-815a-0ab1564698fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.966632] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5fd802-5c02-4537-919b-5bfc58cd5dc9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.986440] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bbf535-b8d7-496c-9cb4-105e7f38f27e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2277.994022] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2277.994314] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-644b01c0-2613-4fc2-a980-e1bd29344468 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.003456] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2278.003456] env[62405]: value = "task-1948426" [ 2278.003456] env[62405]: _type = "Task" [ 2278.003456] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.011326] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948426, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2278.401529] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2278.401766] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 2278.513113] env[62405]: DEBUG oslo_vmware.api [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948426, 'name': PowerOnVM_Task, 'duration_secs': 0.35816} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2278.513609] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2279.395845] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2279.525762] env[62405]: INFO nova.compute.manager [None req-9e7c160b-3400-49a9-bda7-b09bf39fd5a4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance to original state: 'active' [ 2280.400900] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2280.401264] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 2280.602801] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2280.603222] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2280.603370] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 
tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2280.603585] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2280.603794] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2280.606069] env[62405]: INFO nova.compute.manager [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Terminating instance [ 2281.109894] env[62405]: DEBUG nova.compute.manager [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2281.110185] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2281.111105] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dff9897-0d04-4a83-841e-76b2e2ef981b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.119285] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2281.119514] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-512bb1c3-7753-4d5b-b497-b2eff4713d23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.125286] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2281.125286] env[62405]: value = "task-1948429" [ 2281.125286] env[62405]: _type = "Task" [ 2281.125286] env[62405]: } to complete. 
{{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2281.132987] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2281.635235] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948429, 'name': PowerOffVM_Task, 'duration_secs': 0.184819} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2281.635594] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2281.635685] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2281.635900] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d03849f3-38a7-46e9-802f-c7115dd4601e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2281.910630] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Skipping network cache update for instance because it is being deleted. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10495}} [ 2281.910763] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. 
{{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}} [ 2282.040218] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2282.040463] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2282.040649] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleting the datastore file [datastore1] 27928d2b-1ed5-4326-81e4-1dade794c6a7 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2282.040913] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b75c6584-5eb0-498f-bc0a-04a5dc6c3b12 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.047898] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2282.047898] env[62405]: value = "task-1948431" [ 2282.047898] env[62405]: _type = "Task" [ 2282.047898] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.054861] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.401814] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.557359] env[62405]: DEBUG oslo_vmware.api [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158791} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2282.557552] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2282.557733] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2282.557911] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2282.558099] env[62405]: INFO nova.compute.manager [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Took 1.45 seconds to destroy the instance on the hypervisor. [ 2282.558354] env[62405]: DEBUG oslo.service.loopingcall [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2282.558554] env[62405]: DEBUG nova.compute.manager [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2282.558654] env[62405]: DEBUG nova.network.neutron [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2282.904641] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.905068] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.905122] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.905262] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2282.906161] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68580443-872a-444f-b663-f3a205ff4b48 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.914901] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07b6966-2981-44d6-be6f-8aaa26791c02 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.930535] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9891deef-c847-4213-a7ca-d57b5288b0e3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.937684] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f403a4cb-05bd-44ca-a468-31a32415a728 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.972519] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180921MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2282.972707] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.972921] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.001689] env[62405]: DEBUG nova.compute.manager [req-96fe3119-3bb0-4b35-99ac-4262bedb43bc req-d7772373-3629-4ab8-854d-bbf87fe83570 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Received event network-vif-deleted-9cd0704c-b882-4e84-a2fd-533974d3bbee {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2283.001817] env[62405]: INFO nova.compute.manager [req-96fe3119-3bb0-4b35-99ac-4262bedb43bc req-d7772373-3629-4ab8-854d-bbf87fe83570 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Neutron deleted interface 9cd0704c-b882-4e84-a2fd-533974d3bbee; detaching it from the instance and deleting it from the info cache [ 2283.001998] env[62405]: DEBUG nova.network.neutron [req-96fe3119-3bb0-4b35-99ac-4262bedb43bc req-d7772373-3629-4ab8-854d-bbf87fe83570 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.477828] env[62405]: DEBUG nova.network.neutron [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.505085] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1a37f90-c953-4dfe-8a16-2477379cde28 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.514471] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99aff023-b9cb-4164-9ae3-6615f90d9546 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.538861] env[62405]: DEBUG nova.compute.manager [req-96fe3119-3bb0-4b35-99ac-4262bedb43bc req-d7772373-3629-4ab8-854d-bbf87fe83570 service nova] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Detach interface failed, port_id=9cd0704c-b882-4e84-a2fd-533974d3bbee, reason: Instance 27928d2b-1ed5-4326-81e4-1dade794c6a7 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2283.982936] env[62405]: INFO nova.compute.manager [-] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Took 1.42 seconds to deallocate network for instance. [ 2284.000279] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46b794f6-e858-45e6-9977-98ab246482f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2284.000453] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3e67c74a-1879-4e74-afad-cd7446f284b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2284.000567] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 27928d2b-1ed5-4326-81e4-1dade794c6a7 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2284.000741] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2284.000881] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2284.046564] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7b54bb-2083-4f94-9ade-63c298ec099e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.053643] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4148133-e811-4d24-ac88-cdb7d804e6e4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.081624] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c10b9c-73ec-4c85-8695-49518acf2f7b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.088580] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1872aa4b-7323-438c-8b7f-5e683699103a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.102114] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2284.489764] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.621084] env[62405]: ERROR nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [req-8a0052b8-414e-42bd-9859-a3f40ee0325e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8a0052b8-414e-42bd-9859-a3f40ee0325e"}]} [ 2284.636156] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2284.647466] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2284.647634] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2284.657078] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2284.674738] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2284.714965] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ce9f6a-8439-400b-ba62-6ea8be58392a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.722347] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062fccef-aab8-4a7c-b5f9-70ef3bfda3c5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.751198] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8352e6-4fdb-4744-aa75-42192e93ed50 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.757841] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b533f77e-4728-4dee-9466-0470eddeca6b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.770503] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2285.300654] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 198 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2285.300933] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 198 to 199 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2285.301112] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2285.806586] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2285.806817] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.834s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.807092] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.317s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2285.807404] env[62405]: DEBUG nova.objects.instance [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'resources' on Instance uuid 27928d2b-1ed5-4326-81e4-1dade794c6a7 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2286.363985] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95646e82-a0c4-4ced-b086-b690e594d062 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.371163] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5c8eb5-310d-4f3f-bb84-1f4628890e31 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.399315] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b828629-6121-4cf8-ad3c-59a310ce4d11 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.406216] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9648817d-28ce-4b24-8e3f-fdfb3e89cee9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.418478] env[62405]: DEBUG nova.compute.provider_tree [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2286.921394] env[62405]: DEBUG nova.scheduler.client.report [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2287.427106] env[62405]: DEBUG 
oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.447515] env[62405]: INFO nova.scheduler.client.report [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted allocations for instance 27928d2b-1ed5-4326-81e4-1dade794c6a7 [ 2287.954968] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4c5ff7c2-5fe7-4afd-98ac-7c7ef663f985 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "27928d2b-1ed5-4326-81e4-1dade794c6a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.352s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.687249] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.687581] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2289.190793] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2289.714112] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2289.714376] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2289.715883] env[62405]: INFO nova.compute.claims [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2290.843566] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607b20cc-8323-476e-a56e-495c61ce5aad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.851162] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea22c78-432c-4097-9c89-ffc7674b533d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.879702] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aeeeff0-8269-4c26-90bb-1b7d206f78bb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.886554] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1562669b-3d38-4a79-95ad-20ae375c07b4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.899933] env[62405]: DEBUG nova.compute.provider_tree [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2291.403399] env[62405]: DEBUG nova.scheduler.client.report [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2291.908782] env[62405]: DEBUG oslo_concurrency.lockutils [None 
req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2291.909304] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2292.414898] env[62405]: DEBUG nova.compute.utils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2292.416430] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Allocating IP information in the background. {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2292.416611] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2292.466198] env[62405]: DEBUG nova.policy [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9c18747ac7149dba0e1c0a8fc6c0b7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd9a1a4650b34e388c50c7575cf09a7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2292.717653] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Successfully created port: abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2292.920354] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Start building block device mappings for instance. 
{{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2293.931958] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Start spawning the instance on the hypervisor. {{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2293.960818] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-21T03:04:00Z,direct_url=,disk_format='vmdk',id=e6bba7a8-c2de-41dc-871a-3859bba5f4f9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='69dc3a146cd14230b1180689e2fea090',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-21T03:04:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2293.961092] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2293.961257] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2293.961440] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2293.961588] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2293.961736] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2293.962012] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
2293.962217] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2293.962405] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2293.962582] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2293.962755] env[62405]: DEBUG nova.virt.hardware [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2293.963622] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d223ff47-58c5-4f71-b6d1-2e0fd3bb301f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.971635] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f9f77f-3ba7-4aad-8afd-30c5a5cc249b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.087313] env[62405]: DEBUG nova.compute.manager [req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Received event network-vif-plugged-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2294.087663] env[62405]: DEBUG oslo_concurrency.lockutils [req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] Acquiring lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2294.087827] env[62405]: DEBUG oslo_concurrency.lockutils [req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2294.087939] env[62405]: DEBUG oslo_concurrency.lockutils [req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2294.088172] env[62405]: DEBUG nova.compute.manager 
[req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] No waiting events found dispatching network-vif-plugged-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2294.088346] env[62405]: WARNING nova.compute.manager [req-b94b861a-d504-4d26-aaef-81684c575bfb req-b910ada6-64e5-4254-9813-c7af63bc823c service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Received unexpected event network-vif-plugged-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a for instance with vm_state building and task_state spawning. [ 2294.186373] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Successfully updated port: abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2294.688488] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2294.688732] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2294.688904] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2295.095918] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.096210] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.421225] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2295.568897] env[62405]: DEBUG nova.network.neutron [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2295.597958] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Starting instance... 
{{(pid=62405) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2296.071773] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2296.072140] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Instance network_info: |[{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2296.072623] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:be:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0a88b707-352e-4be7-b1d6-ad6074b40ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2296.080012] env[62405]: DEBUG oslo.service.loopingcall [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2296.080285] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2296.080530] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1d13659-c563-4460-9986-b3244cc8cebd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.100444] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2296.100444] env[62405]: value = "task-1948437" [ 2296.100444] env[62405]: _type = "Task" [ 2296.100444] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.110498] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948437, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.115308] env[62405]: DEBUG nova.compute.manager [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Received event network-changed-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2296.115512] env[62405]: DEBUG nova.compute.manager [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Refreshing instance network info cache due to event network-changed-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2296.115685] env[62405]: DEBUG oslo_concurrency.lockutils [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] Acquiring lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2296.115825] env[62405]: DEBUG oslo_concurrency.lockutils [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] Acquired lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2296.115986] env[62405]: DEBUG nova.network.neutron [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Refreshing network info cache for port abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2296.125789] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.126028] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.127507] env[62405]: INFO nova.compute.claims [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2296.610175] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948437, 'name': CreateVM_Task, 'duration_secs': 0.397394} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2296.610363] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2296.611035] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2296.611207] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2296.611518] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2296.611761] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e917217-38df-4453-a2a5-57dd59520f71 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.616078] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2296.616078] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524291b4-b086-e8f8-366d-843e8064db1a" [ 2296.616078] env[62405]: _type = "Task" [ 2296.616078] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.625088] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524291b4-b086-e8f8-366d-843e8064db1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.849386] env[62405]: DEBUG nova.network.neutron [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updated VIF entry in instance network info cache for port abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2296.849766] env[62405]: DEBUG nova.network.neutron [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2297.127186] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]524291b4-b086-e8f8-366d-843e8064db1a, 'name': SearchDatastore_Task, 'duration_secs': 0.008739} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.127552] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2297.127700] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Processing image e6bba7a8-c2de-41dc-871a-3859bba5f4f9 {{(pid=62405) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2297.127933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2297.128094] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2297.128300] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2297.128554] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d834b75-d1f5-437d-9fc3-dc3ebb555433 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.137279] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62405) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2297.137487] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62405) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2297.138173] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7067a5f1-ba95-4bee-a7b0-7a8855fbb34c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.143371] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2297.143371] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527ac39f-7cf4-beb1-e96d-af2cfb6f5e80" [ 2297.143371] env[62405]: _type = "Task" [ 2297.143371] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.151640] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527ac39f-7cf4-beb1-e96d-af2cfb6f5e80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.194365] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91113fe-cf92-4ab5-8dd8-669dbf6cc952 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.201440] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e486d5-9d37-42ea-8a58-f9fe3af44c55 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.230181] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b8750d-24d1-407a-bc00-52d3da582c27 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.236972] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7a8869-7087-4e9d-90a0-88f647e6faa7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.249655] env[62405]: DEBUG nova.compute.provider_tree [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2297.352940] env[62405]: DEBUG oslo_concurrency.lockutils [req-618fdb44-8820-490f-8124-c29bc8a5bae2 req-f14b2074-1800-4cb6-a8b0-de3d312f07d4 service nova] Releasing lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2297.653270] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]527ac39f-7cf4-beb1-e96d-af2cfb6f5e80, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.654045] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b80db0a4-ff8d-4aae-9da3-3dad56f875d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.658802] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2297.658802] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52648a83-3bd2-0d53-e756-bc6c422c9a48" [ 2297.658802] env[62405]: _type = "Task" [ 2297.658802] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.666309] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52648a83-3bd2-0d53-e756-bc6c422c9a48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.753599] env[62405]: DEBUG nova.scheduler.client.report [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2298.169390] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52648a83-3bd2-0d53-e756-bc6c422c9a48, 'name': SearchDatastore_Task, 'duration_secs': 0.01} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.169693] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2298.169921] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 99efb053-2a9f-47b3-94a4-1063d33fba6f/99efb053-2a9f-47b3-94a4-1063d33fba6f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2298.170182] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee20c275-aae0-4702-8c18-49304913db44 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.177114] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2298.177114] env[62405]: value = "task-1948438" [ 2298.177114] env[62405]: _type = "Task" [ 2298.177114] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2298.184957] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.258971] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2298.259371] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Start building networks asynchronously for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2298.687333] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.401464} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.687601] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e6bba7a8-c2de-41dc-871a-3859bba5f4f9/e6bba7a8-c2de-41dc-871a-3859bba5f4f9.vmdk to [datastore1] 99efb053-2a9f-47b3-94a4-1063d33fba6f/99efb053-2a9f-47b3-94a4-1063d33fba6f.vmdk {{(pid=62405) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2298.687810] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Extending root virtual disk to 1048576 {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2298.688062] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d47c9270-42cb-4369-bfa4-5dcd9be1b620 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.694062] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2298.694062] env[62405]: value = "task-1948439" [ 2298.694062] env[62405]: _type = "Task" [ 2298.694062] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2298.701238] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.765033] env[62405]: DEBUG nova.compute.utils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Using /dev/sd instead of None {{(pid=62405) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2298.766208] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Allocating IP information in the background. 
{{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2298.766352] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] allocate_for_instance() {{(pid=62405) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2298.804530] env[62405]: DEBUG nova.policy [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4ac1534df994c18bad62ec85acbc69f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a423f493034065bb1591d14d215ed8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62405) authorize /opt/stack/nova/nova/policy.py:192}} [ 2299.127383] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Successfully created port: 38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2299.203836] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061886} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2299.204194] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Extended root virtual disk {{(pid=62405) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2299.204872] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d7f772-7949-4f78-97a9-5ce8d6f351d9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.226134] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 99efb053-2a9f-47b3-94a4-1063d33fba6f/99efb053-2a9f-47b3-94a4-1063d33fba6f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2299.226370] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbc7631c-0069-448b-a7ff-6d354d5d58b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.244752] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2299.244752] env[62405]: value = "task-1948440" [ 2299.244752] env[62405]: _type = "Task" [ 2299.244752] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2299.251821] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2299.269729] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Start building block device mappings for instance. {{(pid=62405) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2299.754369] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948440, 'name': ReconfigVM_Task, 'duration_secs': 0.313968} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2299.754659] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 99efb053-2a9f-47b3-94a4-1063d33fba6f/99efb053-2a9f-47b3-94a4-1063d33fba6f.vmdk or device None with type sparse {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2299.755312] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7890e19f-6084-4bea-8a88-6be5ffb15970 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.761289] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2299.761289] env[62405]: value = "task-1948441" [ 2299.761289] env[62405]: _type = "Task" [ 2299.761289] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2299.769694] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948441, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2299.775154] env[62405]: INFO nova.virt.block_device [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Booting with volume b4f70023-9fa7-4af9-a4cd-74e0dc15e429 at /dev/sda [ 2299.820939] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44d07308-dedd-42ae-89bc-ad3795591b9a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.829944] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ade104c-3817-40fe-9d7c-b39267e6f4fc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.855781] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2b9b53e-b9f7-48de-8848-c6fbbe0672a0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.863879] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d0be4b-4625-4d20-9731-67992a163cfb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.888199] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe6854-04c5-43fb-b413-2df2cf21ade6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.894337] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2f117b-d182-41dc-ad2d-70073bddce56 {{(pid=62405) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.907200] env[62405]: DEBUG nova.virt.block_device [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating existing volume attachment record: 068081f5-67a8-4640-a630-c80835465347 {{(pid=62405) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2300.270981] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948441, 'name': Rename_Task} progress is 14%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.494089] env[62405]: DEBUG nova.compute.manager [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Received event network-vif-plugged-38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2300.494400] env[62405]: DEBUG oslo_concurrency.lockutils [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.494683] env[62405]: DEBUG oslo_concurrency.lockutils [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2300.494894] env[62405]: DEBUG oslo_concurrency.lockutils [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2300.495261] env[62405]: DEBUG nova.compute.manager [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] No waiting events found dispatching network-vif-plugged-38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2300.495464] env[62405]: WARNING nova.compute.manager [req-d8b22250-7ee1-468e-a7e7-0e3ea4cce561 req-36a50348-35b9-459f-b03f-4709c57be6ee service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Received unexpected event network-vif-plugged-38b1eaa6-abab-4503-83af-9b3f4a753e47 for instance with vm_state building and task_state block_device_mapping. 
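
[editor's note] The entries just above show neutron delivering `network-vif-plugged-38b1eaa6-...` while the instance is still in task_state `block_device_mapping`; because no waiter had been registered yet, the compute manager logs "No waiting events found" and flags the event as unexpected. The snippet below is a minimal, self-contained sketch of that waiter-registry pattern (register interest first, pop on arrival). It is illustrative only and is not Nova's actual `InstanceEvents` implementation; all class and function names here are invented for the example.

```python
# Illustrative sketch of the "expected event" pattern seen in the log above:
# an external network-vif-plugged notification is only consumed if a waiter
# was registered beforehand; otherwise it is dropped with a warning.
import threading
from collections import defaultdict


class InstanceEventRegistry:
    """Hypothetical registry keyed by (instance_uuid, event_name)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest *before* triggering the action that emits the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        """Called when the external event arrives; returns the waiter or None."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


registry = InstanceEventRegistry()


def on_external_event(instance_uuid, event_name):
    # Mirrors the branch logged at 2300.495261 / 2300.495464: with no waiter
    # registered, the event is reported as unexpected and discarded.
    waiter = registry.pop(instance_uuid, event_name)
    if waiter is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()
```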
[ 2300.576496] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Successfully updated port: 38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2300.771520] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948441, 'name': Rename_Task, 'duration_secs': 0.891995} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2300.771781] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2300.772021] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e15b4055-9576-487a-8903-1f7d6ccc4181 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.777470] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2300.777470] env[62405]: value = "task-1948442" [ 2300.777470] env[62405]: _type = "Task" [ 2300.777470] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2300.784519] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2301.079064] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2301.079064] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2301.079064] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2301.287964] env[62405]: DEBUG oslo_vmware.api [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948442, 'name': PowerOnVM_Task, 'duration_secs': 0.448879} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2301.288269] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2301.288491] env[62405]: INFO nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Took 7.36 seconds to spawn the instance on the hypervisor. [ 2301.288671] env[62405]: DEBUG nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2301.289421] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475d1622-5649-4c9d-a153-d277d7a4b96e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.610502] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Instance cache missing network info. 
{{(pid=62405) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2301.729229] env[62405]: DEBUG nova.network.neutron [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2301.806545] env[62405]: INFO nova.compute.manager [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Took 12.11 seconds to build instance. [ 2301.988555] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Start spawning the instance on the hypervisor. 
{{(pid=62405) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2301.989113] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2301.989338] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2301.989499] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2301.989681] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2301.989828] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2301.989975] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2301.990217] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2301.990406] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2301.990574] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2301.990739] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2301.990911] env[62405]: DEBUG nova.virt.hardware [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2301.991950] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb707a4-760a-44ba-b993-99e4d4d632a6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.001399] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840c1e30-7c1d-4c0e-aebf-25d274866b99 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.232174] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2302.232568] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Instance network_info: |[{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62405) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2302.233032] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:77:6a:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38b1eaa6-abab-4503-83af-9b3f4a753e47', 'vif_model': 'vmxnet3'}] {{(pid=62405) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2302.240284] env[62405]: DEBUG oslo.service.loopingcall [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2302.240497] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Creating VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2302.240712] env[62405]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1082dcb7-7c38-4ce7-a45f-6e96cc2a736f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.260235] env[62405]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2302.260235] env[62405]: value = "task-1948443" [ 2302.260235] env[62405]: _type = "Task" [ 2302.260235] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2302.267861] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948443, 'name': CreateVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.308646] env[62405]: DEBUG oslo_concurrency.lockutils [None req-7fdaec5b-c25c-4bbc-a51f-6eb559a83c1d tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.621s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.520813] env[62405]: DEBUG nova.compute.manager [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Received event network-changed-38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2302.521014] env[62405]: DEBUG nova.compute.manager [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Refreshing instance network info cache due to event network-changed-38b1eaa6-abab-4503-83af-9b3f4a753e47. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2302.521240] env[62405]: DEBUG oslo_concurrency.lockutils [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2302.521385] env[62405]: DEBUG oslo_concurrency.lockutils [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2302.521543] env[62405]: DEBUG nova.network.neutron [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Refreshing network info cache for port 38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2302.773405] env[62405]: DEBUG oslo_vmware.api [-] Task: {'id': task-1948443, 'name': CreateVM_Task, 'duration_secs': 0.369117} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2302.773644] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Created VM on the ESX host {{(pid=62405) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2302.774560] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'delete_on_termination': True, 'guest_format': None, 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'attachment_id': '068081f5-67a8-4640-a630-c80835465347', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401620', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'name': 'volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c7a31930-a713-4aa0-a983-f17c48bfc64d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'serial': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429'}, 'volume_type': None}], 'swap': None} {{(pid=62405) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2302.774848] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Root volume attach. 
Driver type: vmdk {{(pid=62405) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2302.775920] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebef6f9-1ec7-4017-98eb-e5d14c85ec3d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.786596] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6611e71-5e2c-4480-9e93-7149997e53fb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.794769] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ad4c59-f572-4a93-932e-a866926caa1e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.802179] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b7214d45-cebc-46cb-82c0-3a138ad71fda {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.809088] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2302.809088] env[62405]: value = "task-1948444" [ 2302.809088] env[62405]: _type = "Task" [ 2302.809088] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2302.816615] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948444, 'name': RelocateVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.285695] env[62405]: DEBUG nova.network.neutron [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updated VIF entry in instance network info cache for port 38b1eaa6-abab-4503-83af-9b3f4a753e47. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2303.286104] env[62405]: DEBUG nova.network.neutron [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2303.322526] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948444, 'name': RelocateVM_Task, 'duration_secs': 0.365644} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.322912] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Volume attach. 
Driver type: vmdk {{(pid=62405) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2303.323019] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401620', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'name': 'volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c7a31930-a713-4aa0-a983-f17c48bfc64d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'serial': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2303.323863] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60bb650-fc67-4a5d-8b07-9331f9a12d6c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.339471] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de90ff5-153b-4524-95aa-0b6d68760376 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.361642] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429/volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2303.361925] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae362618-94bb-4cd0-9f53-f824be651d80 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.382212] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2303.382212] env[62405]: value = "task-1948445" [ 2303.382212] env[62405]: _type = "Task" [ 2303.382212] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.392103] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948445, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.788792] env[62405]: DEBUG oslo_concurrency.lockutils [req-8233549a-c9f8-4ae5-baf3-215fc8820a3a req-b1d18137-69a7-4a9e-9f5b-f6cc2906ecad service nova] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.892743] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948445, 'name': ReconfigVM_Task, 'duration_secs': 0.253778} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.892975] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429/volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2303.897551] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a539c450-52eb-475b-a413-5c151380cf83 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.913795] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2303.913795] env[62405]: value = "task-1948446" [ 2303.913795] env[62405]: _type = "Task" [ 2303.913795] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.923644] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.424332] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948446, 'name': ReconfigVM_Task, 'duration_secs': 0.138011} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.424668] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401620', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'name': 'volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c7a31930-a713-4aa0-a983-f17c48bfc64d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'serial': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429'} {{(pid=62405) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2304.425094] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd51c42f-134b-464b-b438-de205491ba8a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.431468] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2304.431468] env[62405]: value = "task-1948447" [ 2304.431468] env[62405]: _type = "Task" [ 2304.431468] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.438682] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948447, 'name': Rename_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.549799] env[62405]: DEBUG nova.compute.manager [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Received event network-changed-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2304.549963] env[62405]: DEBUG nova.compute.manager [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Refreshing instance network info cache due to event network-changed-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2304.550198] env[62405]: DEBUG oslo_concurrency.lockutils [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] Acquiring lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2304.550362] env[62405]: DEBUG oslo_concurrency.lockutils [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] Acquired lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2304.550553] env[62405]: DEBUG nova.network.neutron [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Refreshing network info cache for port abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2304.942013] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948447, 'name': Rename_Task, 'duration_secs': 0.127777} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.942279] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2304.942572] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2aa41c55-c3a5-4a36-8745-0f6ad8797c9e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.950158] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2304.950158] env[62405]: value = "task-1948448" [ 2304.950158] env[62405]: _type = "Task" [ 2304.950158] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.957980] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2305.253057] env[62405]: DEBUG nova.network.neutron [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updated VIF entry in instance network info cache for port abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2305.253461] env[62405]: DEBUG nova.network.neutron [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2305.460531] env[62405]: DEBUG oslo_vmware.api [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948448, 'name': PowerOnVM_Task, 'duration_secs': 0.475931} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2305.460902] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2305.460987] env[62405]: INFO nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Took 3.47 seconds to spawn the instance on the hypervisor. 
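
[editor's note] Several cache-refresh entries above dump the instance's `network_info` structure verbatim: a list of VIF dicts, each with a port `id`, a MAC under `address`, and nested `network -> subnets -> ips` with optional `floating_ips`. The helper below is a small, hypothetical reader for exactly that layout (it is not a Nova API); the sample values are copied from the port `abec3ae6-...` entry logged at 2305.253461.

```python
# Illustrative helper for the network_info dicts logged by the cache refreshes.
def summarize_vif(vif):
    """Collect fixed and floating IPs from one cached VIF entry."""
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return {"port_id": vif["id"], "mac": vif["address"],
            "fixed_ips": fixed, "floating_ips": floating}


# Values abridged from the cache dump above (port abec3ae6-..., instance 99efb053-...).
vif = {
    "id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a",
    "address": "fa:16:3e:dd:be:23",
    "network": {"subnets": [{"ips": [
        {"address": "192.168.128.4",
         "floating_ips": [{"address": "10.180.180.241"}]},
    ]}]},
}

print(summarize_vif(vif))
# fixed_ips -> ['192.168.128.4'], floating_ips -> ['10.180.180.241']
```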
[ 2305.461184] env[62405]: DEBUG nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2305.461920] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69b38ca-ca79-4675-a7d6-e48bda5bd7e2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.756548] env[62405]: DEBUG oslo_concurrency.lockutils [req-35ee6ee1-3b9b-46cf-9a96-abdc34157b8a req-0312ee8c-5780-4af9-8513-a61127b42491 service nova] Releasing lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2305.980614] env[62405]: INFO nova.compute.manager [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Took 9.88 seconds to build instance. [ 2306.483299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4fb56cce-322f-4b59-b48d-20e1f24e70b5 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.387s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2307.159787] env[62405]: DEBUG nova.compute.manager [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2307.160037] env[62405]: DEBUG nova.compute.manager [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing instance network info cache due to event network-changed-1cb8209b-2a23-499d-b852-91ad4d89784e. 
{{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2307.160260] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2307.160379] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2307.160537] env[62405]: DEBUG nova.network.neutron [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Refreshing network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2307.855457] env[62405]: DEBUG nova.network.neutron [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updated VIF entry in instance network info cache for port 1cb8209b-2a23-499d-b852-91ad4d89784e. {{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2307.855825] env[62405]: DEBUG nova.network.neutron [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2308.358684] env[62405]: DEBUG oslo_concurrency.lockutils [req-0a51e61d-f474-4fa1-ac39-5971ca9848e6 req-8e752bf9-7368-40d7-b197-e2b79cddf9bd service nova] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2308.645689] env[62405]: DEBUG nova.compute.manager [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 
c7a31930-a713-4aa0-a983-f17c48bfc64d] Stashing vm_state: active {{(pid=62405) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 2309.172622] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.172899] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.184139] env[62405]: DEBUG nova.compute.manager [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Received event network-changed-38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2309.184374] env[62405]: DEBUG nova.compute.manager [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Refreshing instance network info cache due to event network-changed-38b1eaa6-abab-4503-83af-9b3f4a753e47. {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11654}} [ 2309.184566] env[62405]: DEBUG oslo_concurrency.lockutils [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2309.184713] env[62405]: DEBUG oslo_concurrency.lockutils [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2309.184875] env[62405]: DEBUG nova.network.neutron [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Refreshing network info cache for port 38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2309.677988] env[62405]: INFO nova.compute.claims [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2309.882562] env[62405]: DEBUG nova.network.neutron [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updated VIF entry in instance network info cache for port 38b1eaa6-abab-4503-83af-9b3f4a753e47. 
{{(pid=62405) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2309.882936] env[62405]: DEBUG nova.network.neutron [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2310.185074] env[62405]: INFO nova.compute.resource_tracker [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating resource usage from migration 4e32b4da-073e-46d8-8c7b-8f4e118ba720 [ 2310.255869] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894de83b-3107-4452-97e9-4eae7595f86b {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.263997] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ffa170-8550-40ba-965a-25039874e059 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.293844] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d4f43a-bf4e-4847-83f5-eb034eb090f6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.300903] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36da09a3-c264-4d1a-bc8b-4d05773b2b40 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.313805] env[62405]: DEBUG nova.compute.provider_tree [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2310.385386] env[62405]: DEBUG 
oslo_concurrency.lockutils [req-81963ecb-3d71-495c-9b43-da42f5eb60bd req-1ff2a381-432f-4177-8145-0071d7d99e8a service nova] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.817563] env[62405]: DEBUG nova.scheduler.client.report [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2311.323063] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.149s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.323063] env[62405]: INFO nova.compute.manager [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Migrating [ 2311.839318] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2311.839530] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.839707] env[62405]: DEBUG nova.network.neutron [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2312.543012] env[62405]: DEBUG nova.network.neutron [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.045581] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2314.560270] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d75e9d-73e6-4643-ac72-4a9116806aeb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.578606] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 0 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2315.084755] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2315.085076] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e99a32f-d8d7-46f1-8928-c113a997a77d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.093133] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2315.093133] env[62405]: value = "task-1948449" [ 2315.093133] env[62405]: _type = "Task" [ 2315.093133] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2315.100583] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948449, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2315.603319] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948449, 'name': PowerOffVM_Task, 'duration_secs': 0.222729} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2315.604277] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2315.604277] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 17 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2316.110616] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:21Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2316.110887] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2316.111058] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2316.111247] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2316.111398] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2316.111550] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2316.111759] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2316.111922] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2316.112105] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2316.112272] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2316.112514] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2316.117423] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af194a3-c31b-40bb-9c1d-f3576de4d5e1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.133047] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2316.133047] env[62405]: value = "task-1948450" [ 2316.133047] env[62405]: _type = "Task" [ 2316.133047] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.141977] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948450, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.643399] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948450, 'name': ReconfigVM_Task, 'duration_secs': 0.162024} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2316.643779] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 33 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2317.149721] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-21T03:04:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 2317.150054] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2317.150145] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image limits 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 2317.150314] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Flavor pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2317.150463] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Image pref 0:0:0 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 2317.150615] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62405) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 2317.150870] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 2317.151050] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 2317.151223] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Got 1 possible topologies {{(pid=62405) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 2317.151388] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 2317.151564] env[62405]: DEBUG nova.virt.hardware [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62405) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 2317.156833] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2317.157141] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0065cb5c-9428-4a5b-a6cf-8be85ec9f445 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.175673] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2317.175673] env[62405]: value = "task-1948451" [ 2317.175673] env[62405]: _type = "Task" [ 2317.175673] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.183023] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.685195] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948451, 'name': ReconfigVM_Task, 'duration_secs': 0.156458} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.685498] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2317.686176] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc53659b-55ac-4699-870d-716d17dc9ea2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.707159] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429/volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2317.707402] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ebc5893-2abd-4338-b429-495a3dc05642 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.724892] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2317.724892] env[62405]: value = "task-1948452" [ 2317.724892] env[62405]: _type = "Task" [ 2317.724892] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.732233] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.234702] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948452, 'name': ReconfigVM_Task, 'duration_secs': 0.266563} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.234980] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429/volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429.vmdk or device None with type thin {{(pid=62405) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2318.235276] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 50 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2318.742054] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ae06a8-1d29-4326-a85e-7d484c0b8dea {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.760484] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcf993a-6f2b-4e23-8a08-b413368c80f0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.776787] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 67 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2320.682139] env[62405]: DEBUG nova.network.neutron [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Port 38b1eaa6-abab-4503-83af-9b3f4a753e47 binding to destination host cpu-1 is already ACTIVE {{(pid=62405) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2321.703416] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.703838] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2321.703838] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2322.737054] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2322.737299] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2322.737431] env[62405]: DEBUG nova.network.neutron [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2323.436518] env[62405]: DEBUG nova.network.neutron [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2323.938760] env[62405]: DEBUG oslo_concurrency.lockutils [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2324.447936] env[62405]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a738a1b-c00a-4f39-a287-8718fd0ab338 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.455344] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329d26c3-f96f-4a3d-8dee-a0c28908e94e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.551265] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c7af4d-3e11-435b-8b55-a2d5a1fb76b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.569929] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857f4553-c173-44dd-9bd2-8983638677eb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2325.576510] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 83 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2326.082363] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powering on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2326.082706] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6cc26da-0abd-4d6f-aae5-e67f46a3a5e0 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2326.090265] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2326.090265] env[62405]: value = "task-1948453" [ 2326.090265] env[62405]: _type = "Task" [ 2326.090265] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2326.104605] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948453, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2326.603019] env[62405]: DEBUG oslo_vmware.api [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948453, 'name': PowerOnVM_Task, 'duration_secs': 0.385} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2326.603362] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2326.603474] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-86e1d315-ab43-40ad-baa4-7b90b146fe86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance 'c7a31930-a713-4aa0-a983-f17c48bfc64d' progress to 100 {{(pid=62405) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2329.031367] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2329.031777] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2329.031822] env[62405]: DEBUG nova.compute.manager [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Going to confirm migration 10 {{(pid=62405) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 2329.568070] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.568272] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquired lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.568448] env[62405]: DEBUG nova.network.neutron [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2329.568625] env[62405]: DEBUG nova.objects.instance [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'info_cache' on Instance uuid c7a31930-a713-4aa0-a983-f17c48bfc64d 
{{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2330.820147] env[62405]: DEBUG nova.network.neutron [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [{"id": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "address": "fa:16:3e:77:6a:40", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38b1eaa6-ab", "ovs_interfaceid": "38b1eaa6-abab-4503-83af-9b3f4a753e47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.322860] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Releasing lock "refresh_cache-c7a31930-a713-4aa0-a983-f17c48bfc64d" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.323134] env[62405]: DEBUG nova.objects.instance [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'migration_context' on Instance uuid c7a31930-a713-4aa0-a983-f17c48bfc64d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2331.826395] env[62405]: DEBUG nova.objects.base [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Object Instance<c7a31930-a713-4aa0-a983-f17c48bfc64d> lazy-loaded attributes: info_cache,migration_context {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2331.827339] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3deaa2-609c-4386-bfcd-c1a1c9675025 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.846157] env[62405]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0df9215-25ab-42ad-87f6-a9f3f7948a8f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.851206] env[62405]: DEBUG oslo_vmware.api [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] 
Waiting for the task: (returnval){ [ 2331.851206] env[62405]: value = "session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e7482b-6830-e14d-0913-5b93c7873c8f" [ 2331.851206] env[62405]: _type = "Task" [ 2331.851206] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.858266] env[62405]: DEBUG oslo_vmware.api [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e7482b-6830-e14d-0913-5b93c7873c8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.361570] env[62405]: DEBUG oslo_vmware.api [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': session[52ce0cde-69d0-fd57-e41e-119219eb49f4]52e7482b-6830-e14d-0913-5b93c7873c8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010647} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.361899] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2332.362143] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.940973] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc929a7-899f-49fe-91c9-789958192633 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.948686] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5496f95-9824-41c4-8dea-dbf4fe3ce895 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.979058] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b1009a-f5f0-49f1-82ce-31d036f0c0ca {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.986701] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c660039-b899-43b9-b241-9dd0f33dd837 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.999648] env[62405]: DEBUG nova.compute.provider_tree [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2333.503413] env[62405]: DEBUG nova.scheduler.client.report [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2334.513777] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.151s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.064821] env[62405]: INFO nova.scheduler.client.report [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocation for migration 4e32b4da-073e-46d8-8c7b-8f4e118ba720 [ 2335.527883] env[62405]: INFO nova.compute.manager [None req-1ab7d22f-eed8-460a-8829-a3acfabe3948 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Get console output [ 2335.528260] env[62405]: WARNING nova.virt.vmwareapi.driver [None req-1ab7d22f-eed8-460a-8829-a3acfabe3948 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] The console log is missing. 
Check your VSPC configuration [ 2335.570991] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e0663466-73ac-4de3-b5e1-a72f91c3bb43 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 6.539s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2335.808562] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.401564] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2336.401730] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2337.401382] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2338.400730] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2338.631534] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2338.631885] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2338.632048] env[62405]: DEBUG nova.compute.manager [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2338.632938] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e8ec28-01a3-47cd-8c3b-0141a6ce1d38 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.639936] env[62405]: DEBUG nova.compute.manager [None 
req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62405) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2338.640492] env[62405]: DEBUG nova.objects.instance [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'flavor' on Instance uuid 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2339.401181] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2339.401356] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}} [ 2339.648129] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2339.648466] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a8ffdbb-e624-405a-9628-d89a821be2b2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.656522] env[62405]: DEBUG oslo_vmware.api [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2339.656522] env[62405]: value = "task-1948454" [ 2339.656522] env[62405]: _type = "Task" [ 2339.656522] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.664188] env[62405]: DEBUG oslo_vmware.api [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.166191] env[62405]: DEBUG oslo_vmware.api [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948454, 'name': PowerOffVM_Task, 'duration_secs': 0.180807} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.166455] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2340.166656] env[62405]: DEBUG nova.compute.manager [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2340.167421] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d333dd9-fd50-4f3e-9026-3973dd8a2c78 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.396104] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.678422] env[62405]: DEBUG oslo_concurrency.lockutils [None req-9be2f0d3-8947-47cd-92bf-c108c0155c5a tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.046s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2341.005713] env[62405]: DEBUG nova.objects.instance [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'flavor' on Instance uuid 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2341.401513] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2341.401670] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}} [ 2341.401807] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Rebuilding the list of instances to heal {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10450}} [ 2341.510870] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2341.511079] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock 
"refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2341.511263] env[62405]: DEBUG nova.network.neutron [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2341.511437] env[62405]: DEBUG nova.objects.instance [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'info_cache' on Instance uuid 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2341.944279] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2341.944591] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquired lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2341.944716] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Forcefully refreshing network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2341.944936] env[62405]: DEBUG nova.objects.instance [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lazy-loading 'info_cache' on Instance uuid 46b794f6-e858-45e6-9977-98ab246482f3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2342.015421] env[62405]: DEBUG nova.objects.base [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Object Instance<99efb053-2a9f-47b3-94a4-1063d33fba6f> lazy-loaded attributes: flavor,info_cache {{(pid=62405) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2342.717352] env[62405]: DEBUG nova.network.neutron [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2343.220119] env[62405]: DEBUG oslo_concurrency.lockutils [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2343.638134] env[62405]: DEBUG nova.network.neutron [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [{"id": "1cb8209b-2a23-499d-b852-91ad4d89784e", "address": "fa:16:3e:c5:15:8c", "network": {"id": "a7be14dc-1fb5-4a85-a574-ff7086958535", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-359688506-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a423f493034065bb1591d14d215ed8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb8209b-2a", "ovs_interfaceid": "1cb8209b-2a23-499d-b852-91ad4d89784e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2344.141393] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Releasing lock "refresh_cache-46b794f6-e858-45e6-9977-98ab246482f3" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2344.141593] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updated the network info_cache for instance {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10517}} [ 2344.141831] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.225462] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powering on the VM {{(pid=62405) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2344.225748] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5914da14-f139-4d37-ab36-fc39de3fd6a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.233421] env[62405]: DEBUG oslo_vmware.api [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2344.233421] env[62405]: value = "task-1948455" [ 2344.233421] env[62405]: _type = "Task" [ 2344.233421] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.240842] env[62405]: DEBUG oslo_vmware.api [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.644931] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.645183] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2344.645355] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2344.645513] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2344.646431] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb70da1b-4f0d-424c-82fa-6f7f12b96aeb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.654516] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9140ff56-93c0-4036-95c2-9dbd93861e73 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.667967] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fde605-2e52-4d25-b40a-63889ccdb192 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.674136] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f27397b-6761-46bc-b78a-f00c25e0abe0 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.702307] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181056MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2344.702465] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.702629] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2344.742879] env[62405]: DEBUG oslo_vmware.api [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948455, 'name': PowerOnVM_Task, 'duration_secs': 0.393661} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2344.743161] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powered on the VM {{(pid=62405) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2344.743363] env[62405]: DEBUG nova.compute.manager [None req-b55c7435-8437-4311-a8af-a65a4814baf4 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2344.744116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb31e37d-ff7f-43b4-adf5-7f5ad227662a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.731555] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 46b794f6-e858-45e6-9977-98ab246482f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.731806] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 3e67c74a-1879-4e74-afad-cd7446f284b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
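The PowerOnVM_Task above (task-1948455) is created once and then polled until it reports success, 0.393661s later. Below is a minimal, self-contained sketch of that poll-until-done pattern; the FakeTask class and the state names are illustrative stand-ins, not the oslo.vmware API.

```python
# A self-contained sketch of the poll-until-done pattern the PowerOnVM_Task
# entries above follow (progress 0% ... completed successfully). It is NOT
# the oslo.vmware implementation; FakeTask only stands in for vCenter's
# TaskInfo so the loop can be run as-is.
import itertools
import time


class FakeTask:
    """Stands in for a vCenter task that advances a bit on every poll."""

    def __init__(self, task_id):
        self.task_id = task_id
        self._progress = itertools.chain([0, 62, 88], itertools.repeat(100))

    def poll(self):
        progress = next(self._progress)
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, interval=0.1, timeout=30.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print(f"Task {task.task_id}: {state}, progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"Task {task.task_id} failed")
        time.sleep(interval)
    raise TimeoutError(f"Task {task.task_id} did not complete in {timeout}s")


if __name__ == "__main__":
    wait_for_task(FakeTask("task-1948455"))
```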
{{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.731849] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance 99efb053-2a9f-47b3-94a4-1063d33fba6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.731951] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Instance c7a31930-a713-4aa0-a983-f17c48bfc64d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62405) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2345.732146] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2345.732284] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2345.787629] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd784c8d-fa6b-4ab1-9f8e-5c7bf745a067 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.795374] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5ac1f5-4416-4862-8aa7-e7a7ecc0a180 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.824755] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec430d6-2c6e-4700-a746-057a1aa1fdf3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.831995] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecbccf19-53c5-44cd-878a-40e40c782b0f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.844497] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2346.224713] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e89e01-9c32-4be3-bdfe-9dc50277f370 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.231723] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Suspending the VM {{(pid=62405) suspend 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2346.231967] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2fc3f6ce-d753-49ab-9641-2c6d6c7ca115 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.238618] env[62405]: DEBUG oslo_vmware.api [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2346.238618] env[62405]: value = "task-1948456" [ 2346.238618] env[62405]: _type = "Task" [ 2346.238618] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2346.246329] env[62405]: DEBUG oslo_vmware.api [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948456, 'name': SuspendVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2346.347594] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2346.749679] env[62405]: DEBUG oslo_vmware.api [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948456, 'name': SuspendVM_Task} progress is 62%. 
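The "Final resource view" and the provider inventory logged above can be cross-checked with a few lines of arithmetic: the per-instance allocations plus the 512 MB reserved in the MEMORY_MB inventory reproduce used_ram=1344MB, used_disk=3GB and the 4 allocated vCPUs, and applying (total - reserved) * allocation_ratio to the inventory gives the schedulable capacity. A small sketch using only numbers taken from the log; the capacity formula is the usual placement interpretation, stated here as an assumption rather than code from Nova.

```python
# Reproducing the "Final resource view" numbers above from the per-instance
# allocations the resource tracker just listed. The 512 MB is the 'reserved'
# value in the MEMORY_MB inventory; the (total - reserved) * allocation_ratio
# capacity formula is shown only as a sketch.
allocations = {
    "46b794f6-e858-45e6-9977-98ab246482f3": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    "3e67c74a-1879-4e74-afad-cd7446f284b3": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    "99efb053-2a9f-47b3-94a4-1063d33fba6f": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    # The volume-backed instance has no DISK_GB allocation in the log.
    "c7a31930-a713-4aa0-a983-f17c48bfc64d": {"DISK_GB": 0, "MEMORY_MB": 256, "VCPU": 1},
}

reserved_ram_mb = 512  # 'reserved' in the MEMORY_MB inventory above

used_ram = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations.values())
used_disk = sum(a["DISK_GB"] for a in allocations.values())
used_vcpus = sum(a["VCPU"] for a in allocations.values())

assert used_ram == 1344   # matches used_ram=1344MB in the final resource view
assert used_disk == 3     # matches used_disk=3GB
assert used_vcpus == 4    # matches "total allocated vcpus: 4"

# Schedulable capacity implied by the inventory data logged for the provider.
vcpu_capacity = (48 - 0) * 4.0        # 192 VCPUs with allocation_ratio 4.0
ram_capacity = (196590 - 512) * 1.0   # 196078 MB
disk_capacity = (400 - 0) * 1.0       # 400 GB
print(used_ram, used_disk, used_vcpus, vcpu_capacity, ram_capacity, disk_capacity)
```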
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2346.852710] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2346.852937] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.150s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.853182] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2346.853325] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances with incomplete migration {{(pid=62405) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11784}} [ 2347.249594] env[62405]: DEBUG oslo_vmware.api [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948456, 'name': SuspendVM_Task, 'duration_secs': 0.750323} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2347.249853] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Suspended the VM {{(pid=62405) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2347.250046] env[62405]: DEBUG nova.compute.manager [None req-c47313fd-330c-467e-a2d3-98cf47e80d68 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2347.250774] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b537be-352b-4bef-9b6d-f731bcaad884 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2348.550999] env[62405]: INFO nova.compute.manager [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Resuming [ 2348.551663] env[62405]: DEBUG nova.objects.instance [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'flavor' on Instance uuid 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2349.904200] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62405) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2349.904491] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Cleaning up deleted instances {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11746}} [ 2350.062016] env[62405]: DEBUG oslo_concurrency.lockutils [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2350.062237] env[62405]: DEBUG oslo_concurrency.lockutils [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquired lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2350.062400] env[62405]: DEBUG nova.network.neutron [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Building network info cache for instance {{(pid=62405) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2350.411124] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] There are 17 instances to clean {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11755}} [ 2350.411297] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 27928d2b-1ed5-4326-81e4-1dade794c6a7] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2350.755188] env[62405]: DEBUG nova.network.neutron [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [{"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "address": "fa:16:3e:dd:be:23", "network": {"id": "f9883b88-e1ff-4499-9214-4cc672383a10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1580742860-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9a1a4650b34e388c50c7575cf09a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0a88b707-352e-4be7-b1d6-ad6074b40ed9", "external-id": "nsx-vlan-transportzone-789", "segmentation_id": 789, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapabec3ae6-fc", "ovs_interfaceid": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62405) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2350.914203] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f4af587c-08d3-457e-a20d-a5ea8aad311f] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2351.257617] env[62405]: DEBUG oslo_concurrency.lockutils [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Releasing lock "refresh_cache-99efb053-2a9f-47b3-94a4-1063d33fba6f" {{(pid=62405) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2351.258598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652f1fdc-bdbd-4bcb-87e8-dda4e333ec3c {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.265889] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Resuming the VM {{(pid=62405) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2351.266098] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fb91782-baae-400f-8679-eefe95d0e223 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2351.272405] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2351.272405] env[62405]: value = "task-1948457" [ 2351.272405] env[62405]: _type = "Task" [ 2351.272405] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2351.279476] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.417606] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 8185f9bc-48d5-4cb7-a48d-f744ff704868] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2351.782370] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task} progress is 66%. 
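The instance_info_cache entries above are lists of VIF dictionaries. The snippet below walks a trimmed copy of the abec3ae6-fcd4 entry from the log to pull out the device name, MAC, fixed IP and floating IP; it only illustrates the shape of the data, since Nova itself wraps it in network model objects rather than raw dicts.

```python
# Walking the network_info structure that update_instance_cache_with_nw_info
# logs above. The dict is a trimmed copy of the abec3ae6-fcd4 entry.
network_info = [
    {
        "id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a",
        "address": "fa:16:3e:dd:be:23",
        "network": {
            "id": "f9883b88-e1ff-4499-9214-4cc672383a10",
            "bridge": "br-int",
            "subnets": [
                {
                    "cidr": "192.168.128.0/28",
                    "ips": [
                        {
                            "address": "192.168.128.4",
                            "type": "fixed",
                            "floating_ips": [{"address": "10.180.180.241"}],
                        }
                    ],
                }
            ],
        },
        "type": "ovs",
        "devname": "tapabec3ae6-fc",
        "active": True,
    }
]

for vif in network_info:
    fixed = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
    ]
    floating = [
        fip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        for fip in ip.get("floating_ips", [])
    ]
    print(vif["devname"], vif["address"], fixed, floating)
    # -> tapabec3ae6-fc fa:16:3e:dd:be:23 ['192.168.128.4'] ['10.180.180.241']
```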
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2351.921268] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 9c30bac3-d4f0-4779-9f6e-bc83bb84b001] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2352.284146] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.424156] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: fd311606-a314-4030-9d51-929993ab6b14] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2352.785837] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2352.927687] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 34f4f278-bd4d-43f9-af83-adb48cfb0adc] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2353.285505] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2353.430975] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 60ccb9f6-29ba-44eb-8cec-0d9b78c235ec] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2353.786014] env[62405]: DEBUG oslo_vmware.api [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948457, 'name': PowerOnVM_Task, 'duration_secs': 2.212682} completed successfully. 
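In parallel with the resume, the _run_pending_deletes periodic task above iterates the 17 deleted instances that still have files on disk, logging "has had N of 5 cleanup attempts" for each. A toy sketch of that capped-retry bookkeeping follows; the cleanup body is a stub and only the counting mirrors the log.

```python
# Sketch of the capped-retry bookkeeping behind "Instance has had 0 of 5
# cleanup attempts" above. The cleanup itself is stubbed out; what happens
# once the cap is reached is not shown in this excerpt, so the sketch just
# stops retrying.
MAX_ATTEMPTS = 5

# A few of the 17 instance uuids listed in the log, with their attempt count.
pending = {
    "27928d2b-1ed5-4326-81e4-1dade794c6a7": 0,
    "f4af587c-08d3-457e-a20d-a5ea8aad311f": 0,
    "8185f9bc-48d5-4cb7-a48d-f744ff704868": 0,
}


def try_cleanup(uuid):
    """Placeholder for deleting leftover files of an already-deleted instance."""
    return False  # pretend the cleanup did not finish this time


def run_pending_deletes():
    print(f"There are {len(pending)} instances to clean")
    for uuid, attempts in list(pending.items()):
        if attempts >= MAX_ATTEMPTS:
            continue  # cap reached; stop retrying (illustrative)
        print(f"[instance: {uuid}] Instance has had {attempts} of "
              f"{MAX_ATTEMPTS} cleanup attempts")
        if try_cleanup(uuid):
            del pending[uuid]
        else:
            pending[uuid] = attempts + 1


if __name__ == "__main__":
    run_pending_deletes()
```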
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2353.786295] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Resumed the VM {{(pid=62405) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2353.786478] env[62405]: DEBUG nova.compute.manager [None req-48d75e59-8ca0-4227-ba06-d7c167fded3c tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Checking state {{(pid=62405) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2353.787233] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c53991-4878-46ba-b783-57395b5d74e9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.934595] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 7c74cae9-1607-4928-a927-f0c8b86f7698] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2354.437812] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: f1e9a2e7-0fd3-4a89-8c33-bab6d1987230] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2354.941225] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 58691f22-5acd-45db-b587-df784a000813] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2355.444707] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b2eae940-22bc-4c87-842f-30fbd04eba28] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2355.482435] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.482716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.482967] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.483187] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.483369] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.485603] env[62405]: INFO nova.compute.manager [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Terminating instance [ 2355.948046] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 81d9be97-9147-4754-80c2-68c1a389842e] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2355.988835] env[62405]: DEBUG nova.compute.manager [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Start destroying the instance on the hypervisor. 
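The terminate flow above serializes on two locks: the per-instance lock "99efb053-..." is held for the whole do_terminate_instance call, while the "99efb053-...-events" lock is taken only briefly to clear pending external events. Below is a sketch of that ordering using oslo.concurrency's lock() context manager; the function bodies are placeholders, not the compute manager code.

```python
# Sketch of the lock ordering visible in the terminate flow above: the
# per-instance lock is held for the whole do_terminate_instance call, while
# the "<uuid>-events" lock is only taken briefly to clear pending events.
# Uses oslo.concurrency as the log does; the bodies are placeholders.
from oslo_concurrency import lockutils

INSTANCE_UUID = "99efb053-2a9f-47b3-94a4-1063d33fba6f"


def clear_events_for_instance(uuid):
    """Placeholder for InstanceEvents.clear_events_for_instance."""
    with lockutils.lock(f"{uuid}-events"):
        # Drop any in-flight external events for the instance.
        print(f"cleared events for {uuid}")


def do_terminate_instance(uuid):
    """Placeholder for the inner function logged as do_terminate_instance."""
    with lockutils.lock(uuid):
        clear_events_for_instance(uuid)
        print(f"Terminating instance {uuid}")
        # ... shutdown, destroy on the hypervisor, deallocate network ...


if __name__ == "__main__":
    do_terminate_instance(INSTANCE_UUID)
```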
{{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2355.989114] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2355.989954] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da6b843-0334-4d77-baf8-7e507da5d336 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.998642] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2355.998867] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5de964cf-6f1e-4c30-b327-b182d657e6a1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.004953] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2356.004953] env[62405]: value = "task-1948458" [ 2356.004953] env[62405]: _type = "Task" [ 2356.004953] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.012349] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.450996] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 0d2b305d-d754-413c-afdf-3a2e8f143891] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2356.514356] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948458, 'name': PowerOffVM_Task, 'duration_secs': 0.331723} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2356.514619] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2356.514788] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2356.515032] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-175b7a4d-4ce1-4fa7-abbd-2e566212d9e5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.668682] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2356.668938] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2356.669104] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleting the datastore file [datastore1] 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2356.669373] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-330b2e3b-1b0d-4f46-bedd-6936995ee62f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.675426] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for the task: (returnval){ [ 2356.675426] env[62405]: value = "task-1948460" [ 2356.675426] env[62405]: _type = "Task" [ 2356.675426] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2356.683206] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948460, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.954195] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: a91a6d04-2ec0-4568-bdb3-732d148644de] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2357.185285] env[62405]: DEBUG oslo_vmware.api [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Task: {'id': task-1948460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13086} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.185528] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2357.185706] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2357.185884] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2357.186071] env[62405]: INFO nova.compute.manager [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2357.186315] env[62405]: DEBUG oslo.service.loopingcall [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
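The destroy above runs a fixed sequence: power off the VM, unregister it, delete its datastore contents, and only then deallocate the network inside a retrying looping call. A compressed sketch of that ordering follows, with every step stubbed out; the plain retry loop is only a stand-in for the _deallocate_network_with_retries looping call named in the log.

```python
# Compressed sketch of the destroy ordering in the log above:
# power off -> unregister -> delete datastore contents -> deallocate network.
# Every step is a stub; only the ordering reflects the log.
import time


def power_off(vm):
    print(f"Powered off {vm}")


def unregister(vm):
    print(f"Unregistered {vm}")


def delete_datastore_contents(vm, datastore="datastore1"):
    print(f"Deleted contents of {vm} from {datastore}")


def _deallocate_once(vm):
    # Placeholder for the actual Neutron call; may raise on transient errors.
    print(f"deallocate_for_instance({vm})")


def deallocate_network(vm, attempts=3, delay=1.0):
    """Retry deallocation a few times before giving up."""
    last_error = None
    for attempt in range(1, attempts + 1):
        try:
            _deallocate_once(vm)
            return
        except Exception as exc:  # transient Neutron/API failures
            last_error = exc
            time.sleep(delay)
    raise RuntimeError(f"Could not deallocate network for {vm}: {last_error}")


def destroy(vm):
    power_off(vm)
    unregister(vm)
    delete_datastore_contents(vm)
    deallocate_network(vm)


if __name__ == "__main__":
    destroy("99efb053-2a9f-47b3-94a4-1063d33fba6f")
```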
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2357.186511] env[62405]: DEBUG nova.compute.manager [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2357.186606] env[62405]: DEBUG nova.network.neutron [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2357.457725] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: 6fcfada3-d73a-4814-bf45-d34b26d76d4a] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2357.657729] env[62405]: DEBUG nova.compute.manager [req-3edf6d8a-07da-43fe-bdb0-8e60201b3ce0 req-ff809e8d-cd9d-45b1-814e-202900aa1e23 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Received event network-vif-deleted-abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2357.657729] env[62405]: INFO nova.compute.manager [req-3edf6d8a-07da-43fe-bdb0-8e60201b3ce0 req-ff809e8d-cd9d-45b1-814e-202900aa1e23 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Neutron deleted interface abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a; detaching it from the instance and deleting it from the info cache [ 2357.657729] env[62405]: DEBUG nova.network.neutron [req-3edf6d8a-07da-43fe-bdb0-8e60201b3ce0 req-ff809e8d-cd9d-45b1-814e-202900aa1e23 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2357.962845] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: b495f9e6-60c8-4509-a34f-2e7ed59b6d82] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2358.137354] env[62405]: DEBUG nova.network.neutron [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2358.159591] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-029ec594-e150-40e4-b129-ff3c88fcdca2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.171116] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adea4284-4126-4e11-9437-a90dcba71df5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2358.198478] env[62405]: DEBUG nova.compute.manager [req-3edf6d8a-07da-43fe-bdb0-8e60201b3ce0 req-ff809e8d-cd9d-45b1-814e-202900aa1e23 service nova] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Detach interface failed, port_id=abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a, reason: Instance 99efb053-2a9f-47b3-94a4-1063d33fba6f could not be found. 
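The network-vif-deleted event above arrives while the instance is being torn down, so the later detach fails with "could not be found", the failure is only logged, and the port is still dropped from the cached network_info. A sketch of that tolerant handling; InstanceNotFound and the in-memory cache are stand-ins for Nova's objects.

```python
# Sketch of the tolerant handling of network-vif-deleted seen above: the port
# is removed from the cached network_info, and a failed detach (because the
# instance is already gone) is logged and swallowed.
class InstanceNotFound(Exception):
    pass


info_cache = {
    "99efb053-2a9f-47b3-94a4-1063d33fba6f": [
        {"id": "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a"},
    ],
}


def detach_interface(instance_uuid, port_id):
    """Placeholder that behaves like the failing detach in the log."""
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def handle_vif_deleted(instance_uuid, port_id):
    print(f"Neutron deleted interface {port_id}; detaching it from the "
          f"instance and deleting it from the info cache")
    vifs = info_cache.get(instance_uuid, [])
    info_cache[instance_uuid] = [v for v in vifs if v["id"] != port_id]
    print(f"Updating instance_info_cache with network_info: "
          f"{info_cache[instance_uuid]}")
    try:
        detach_interface(instance_uuid, port_id)
    except InstanceNotFound as exc:
        # Same outcome as the log: report the failure and keep going.
        print(f"Detach interface failed, port_id={port_id}, reason: {exc}")


if __name__ == "__main__":
    handle_vif_deleted(
        "99efb053-2a9f-47b3-94a4-1063d33fba6f",
        "abec3ae6-fcd4-4f06-9a87-b53bf3a52d7a",
    )
```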
{{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2358.469837] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] [instance: c39d9059-8da4-4c8d-99ab-d66b8445e7da] Instance has had 0 of 5 cleanup attempts {{(pid=62405) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11759}} [ 2358.639601] env[62405]: INFO nova.compute.manager [-] [instance: 99efb053-2a9f-47b3-94a4-1063d33fba6f] Took 1.45 seconds to deallocate network for instance. [ 2358.974856] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.145689] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.145959] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.146204] env[62405]: DEBUG nova.objects.instance [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lazy-loading 'resources' on Instance uuid 99efb053-2a9f-47b3-94a4-1063d33fba6f {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2359.711844] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caddca3c-8588-4a4b-89eb-480534d397bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.719820] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caa6439-d069-4882-8145-3b3f393069ae {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.749627] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd9a6d8-3256-4ffe-ad0d-46ee80a5bd10 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.757099] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b651930b-1cb3-4a92-a7f7-15826a49883e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.769813] env[62405]: DEBUG nova.compute.provider_tree [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2360.272937] env[62405]: DEBUG 
nova.scheduler.client.report [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2360.779270] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2360.803208] env[62405]: INFO nova.scheduler.client.report [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Deleted allocations for instance 99efb053-2a9f-47b3-94a4-1063d33fba6f [ 2360.969998] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.310564] env[62405]: DEBUG oslo_concurrency.lockutils [None req-dfbe88b3-1e76-4325-9ff3-c28d071741e2 tempest-ServerActionsTestJSON-1890994719 tempest-ServerActionsTestJSON-1890994719-project-member] Lock "99efb053-2a9f-47b3-94a4-1063d33fba6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.828s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.480779] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.481113] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.481335] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.481519] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.481687] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.483827] env[62405]: INFO nova.compute.manager [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Terminating instance [ 2362.987713] env[62405]: DEBUG nova.compute.manager [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2362.987989] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2362.988329] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-922de5df-2ae6-4cb0-a0ce-67dcd00d88d5 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.996463] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2362.996463] env[62405]: value = "task-1948461" [ 2362.996463] env[62405]: _type = "Task" [ 2362.996463] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2363.004992] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.506125] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948461, 'name': PowerOffVM_Task, 'duration_secs': 0.159918} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2363.506402] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2363.506654] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Volume detach. Driver type: vmdk {{(pid=62405) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2363.506786] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401620', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'name': 'volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c7a31930-a713-4aa0-a983-f17c48bfc64d', 'attached_at': '2024-12-21T03:35:25.000000', 'detached_at': '', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'serial': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2363.507536] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f23c8ab-bf9d-4059-952a-056be2a6dbf1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.525350] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62e3630-2ca0-411d-b376-2c1885c215cb {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.532598] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998ccd07-1464-4d1b-bc8c-1df36015c498 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.559498] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dce90f-e821-4ac1-9756-938fac9d28bd {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.581040] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] The volume has not been displaced from its original location: [datastore1] volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429/volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429.vmdk. No consolidation needed. 
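The _detach_volume_vmdk entries above carry a connection_info dict for the attached Cinder volume, and the driver confirms the backing vmdk is still at its original [datastore1] path ("No consolidation needed") before reconfiguring the VM to drop disk 2000. The snippet below just extracts the relevant fields from a dict copied from the log; the path comparison is illustrative, not the driver's logic.

```python
# Pulling out the fields the vmwareapi volume driver works with from the
# connection_info logged above for volume b4f70023-.... The expected-path
# check mirrors the "has not been displaced from its original location /
# No consolidation needed" decision, but only as an illustration.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-401620",
        "volume_id": "b4f70023-9fa7-4af9-a4cd-74e0dc15e429",
        "name": "volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429",
        "access_mode": "rw",
        "encrypted": False,
    },
    "serial": "b4f70023-9fa7-4af9-a4cd-74e0dc15e429",
}

volume_id = connection_info["data"]["volume_id"]
expected_path = f"[datastore1] volume-{volume_id}/volume-{volume_id}.vmdk"
current_path = expected_path  # what the driver read back from the VM config

if current_path == expected_path:
    print("No consolidation needed; reconfigure the VM to detach the disk")
else:
    print("Volume was displaced; consolidate it back before detaching")
```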
{{(pid=62405) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2363.587918] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2363.588251] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf99c7ac-49d5-4502-8f17-c4477dbda40f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.605589] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2363.605589] env[62405]: value = "task-1948462" [ 2363.605589] env[62405]: _type = "Task" [ 2363.605589] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2363.614623] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948462, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2364.116187] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948462, 'name': ReconfigVM_Task, 'duration_secs': 0.152344} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2364.116476] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=62405) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2364.121186] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6222c5b-0214-46fb-b0f7-45a7c69e6541 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.136832] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2364.136832] env[62405]: value = "task-1948463" [ 2364.136832] env[62405]: _type = "Task" [ 2364.136832] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.145160] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948463, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2364.646379] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948463, 'name': ReconfigVM_Task, 'duration_secs': 0.130632} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2364.646661] env[62405]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-401620', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'name': 'volume-b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'c7a31930-a713-4aa0-a983-f17c48bfc64d', 'attached_at': '2024-12-21T03:35:25.000000', 'detached_at': '', 'volume_id': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429', 'serial': 'b4f70023-9fa7-4af9-a4cd-74e0dc15e429'} {{(pid=62405) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2364.646935] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2364.647708] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77611d96-a0af-4331-8567-902746f7c9ab {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.653893] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2364.654132] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc7926f6-4226-4ca0-9b3b-539ce8afabfc {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.727500] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2364.727689] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2364.727875] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 
tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] c7a31930-a713-4aa0-a983-f17c48bfc64d {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2364.728170] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4ed8edf-9a9e-49e0-9ced-a7fa9ea695da {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.734772] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2364.734772] env[62405]: value = "task-1948465" [ 2364.734772] env[62405]: _type = "Task" [ 2364.734772] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.742345] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.244543] env[62405]: DEBUG oslo_vmware.api [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081551} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2365.244894] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2365.245101] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2365.245288] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2365.245462] env[62405]: INFO nova.compute.manager [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Took 2.26 seconds to destroy the instance on the hypervisor. [ 2365.245695] env[62405]: DEBUG oslo.service.loopingcall [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2365.245882] env[62405]: DEBUG nova.compute.manager [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2365.245976] env[62405]: DEBUG nova.network.neutron [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2365.718791] env[62405]: DEBUG nova.compute.manager [req-044a72cd-7cf1-4338-bcc3-239fb17655c3 req-26a45a0b-2bf1-4ed0-b9b1-0a36e86b8586 service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Received event network-vif-deleted-38b1eaa6-abab-4503-83af-9b3f4a753e47 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2365.719047] env[62405]: INFO nova.compute.manager [req-044a72cd-7cf1-4338-bcc3-239fb17655c3 req-26a45a0b-2bf1-4ed0-b9b1-0a36e86b8586 service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Neutron deleted interface 38b1eaa6-abab-4503-83af-9b3f4a753e47; detaching it from the instance and deleting it from the info cache [ 2365.719177] env[62405]: DEBUG nova.network.neutron [req-044a72cd-7cf1-4338-bcc3-239fb17655c3 req-26a45a0b-2bf1-4ed0-b9b1-0a36e86b8586 service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2366.197592] env[62405]: DEBUG nova.network.neutron [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2366.221833] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81e2f01d-0d59-456d-a4a4-f291d1c6875a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.232579] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4e6397-8b79-4cf6-9695-d683fa1ce129 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.257809] env[62405]: DEBUG nova.compute.manager [req-044a72cd-7cf1-4338-bcc3-239fb17655c3 req-26a45a0b-2bf1-4ed0-b9b1-0a36e86b8586 service nova] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Detach interface failed, port_id=38b1eaa6-abab-4503-83af-9b3f4a753e47, reason: Instance c7a31930-a713-4aa0-a983-f17c48bfc64d could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2366.700492] env[62405]: INFO nova.compute.manager [-] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Took 1.45 seconds to deallocate network for instance. [ 2367.244718] env[62405]: INFO nova.compute.manager [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Took 0.54 seconds to detach 1 volumes for instance. 
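The records above follow one pattern throughout this log: the vmwareapi driver invokes a vCenter task (ReconfigVM_Task to detach the disk, UnregisterVM, DeleteDatastoreFile_Task) and oslo.vmware's wait_for_task then polls it until it reports success or error, emitting the "progress is N%" and "completed successfully" lines seen here. The snippet below is a minimal sketch of that poll loop only, not the oslo.vmware implementation; the fetch_task_info callable and the shape of the object it returns are assumptions made for illustration.

import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(fetch_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    ``fetch_task_info`` is a hypothetical callable returning an object with
    ``state`` ('queued', 'running', 'success', 'error'), ``progress`` and
    ``error`` attributes -- a simplified stand-in for the TaskInfo data the
    real driver reads back through the PropertyCollector calls in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(f"task {task_ref} failed: {info.error}")
        # Still queued/running: report progress and poll again, mirroring
        # the "progress is N%" records above.
        print(f"task {task_ref}: {info.state}, progress {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")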
[ 2367.246852] env[62405]: DEBUG nova.compute.manager [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: c7a31930-a713-4aa0-a983-f17c48bfc64d] Deleting volume: b4f70023-9fa7-4af9-a4cd-74e0dc15e429 {{(pid=62405) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2367.788942] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2367.789184] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2367.789424] env[62405]: DEBUG nova.objects.instance [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'resources' on Instance uuid c7a31930-a713-4aa0-a983-f17c48bfc64d {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2368.345537] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315c4805-d520-407a-b1fe-7091f10d8cc1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.353196] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2b8989-0a9e-4507-a5c5-5e6bc343c5c8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.383017] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2a118d-7c50-4285-8388-81533d8a8db9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.389806] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8014dd-1487-4e62-8abc-dbe6c05f4a3e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2368.402191] env[62405]: DEBUG nova.compute.provider_tree [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2368.905523] env[62405]: DEBUG nova.scheduler.client.report [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 2369.411461] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.622s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.431389] env[62405]: INFO nova.scheduler.client.report [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocations for instance c7a31930-a713-4aa0-a983-f17c48bfc64d [ 2369.939031] env[62405]: DEBUG oslo_concurrency.lockutils [None req-4f724467-b092-4067-8edf-9afed9cb6a4c tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "c7a31930-a713-4aa0-a983-f17c48bfc64d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.458s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.348814] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "3e67c74a-1879-4e74-afad-cd7446f284b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.349131] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.349356] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.349545] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.349716] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.351849] env[62405]: INFO nova.compute.manager [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Terminating instance [ 2370.855352] env[62405]: DEBUG nova.compute.manager [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2370.855730] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2370.856524] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e0b6a5-83ae-41b1-aadc-01428496bedf {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.864449] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2370.864669] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b82f74b-1541-4734-a5ee-96836161574a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.871352] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2370.871352] env[62405]: value = "task-1948467" [ 2370.871352] env[62405]: _type = "Task" [ 2370.871352] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2370.878598] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.380741] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948467, 'name': PowerOffVM_Task, 'duration_secs': 0.235766} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2371.382068] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2371.382068] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2371.382068] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1c3c278-c406-4008-a908-fc1c1744dd2f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.461047] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2371.461306] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2371.461502] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] 3e67c74a-1879-4e74-afad-cd7446f284b3 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2371.461771] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd9ac3a8-84f4-472b-b7bd-9a46b2934eb4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.468168] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2371.468168] env[62405]: value = "task-1948469" [ 2371.468168] env[62405]: _type = "Task" [ 2371.468168] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2371.475498] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948469, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2371.977889] env[62405]: DEBUG oslo_vmware.api [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211253} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2371.978295] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2371.978413] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2371.978604] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2371.978779] env[62405]: INFO nova.compute.manager [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2371.979031] env[62405]: DEBUG oslo.service.loopingcall [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2371.979237] env[62405]: DEBUG nova.compute.manager [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2371.979363] env[62405]: DEBUG nova.network.neutron [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2372.301920] env[62405]: DEBUG nova.compute.manager [req-5c2c7f03-d9b6-4bba-836e-70937b1d9d47 req-82639d4d-59f6-44a4-9208-c1e3c6a7a9cc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Received event network-vif-deleted-8e59c1d7-8667-400c-b30b-9d6aeaec3422 {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2372.301920] env[62405]: INFO nova.compute.manager [req-5c2c7f03-d9b6-4bba-836e-70937b1d9d47 req-82639d4d-59f6-44a4-9208-c1e3c6a7a9cc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Neutron deleted interface 8e59c1d7-8667-400c-b30b-9d6aeaec3422; detaching it from the instance and deleting it from the info cache [ 2372.301920] env[62405]: DEBUG nova.network.neutron [req-5c2c7f03-d9b6-4bba-836e-70937b1d9d47 req-82639d4d-59f6-44a4-9208-c1e3c6a7a9cc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.780951] env[62405]: DEBUG nova.network.neutron [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.804039] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01fb8e12-a561-4eaa-b755-95ae60d5e865 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.814554] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ae95c9-0050-47cb-a1d7-d05afff7e8a4 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2372.837642] env[62405]: DEBUG nova.compute.manager [req-5c2c7f03-d9b6-4bba-836e-70937b1d9d47 req-82639d4d-59f6-44a4-9208-c1e3c6a7a9cc service nova] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Detach interface failed, port_id=8e59c1d7-8667-400c-b30b-9d6aeaec3422, reason: Instance 3e67c74a-1879-4e74-afad-cd7446f284b3 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2373.284273] env[62405]: INFO nova.compute.manager [-] [instance: 3e67c74a-1879-4e74-afad-cd7446f284b3] Took 1.30 seconds to deallocate network for instance. 
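After each hypervisor destroy above, network cleanup is handed to _deallocate_network_with_retries and driven by an oslo.service looping call ("Waiting for function ... to return"), which keeps invoking the cleanup until it finishes. Below is a minimal sketch of that retry-until-done pattern using oslo_service.loopingcall (available in this deployment); the deallocate callable, the attempt limit and the interval are illustrative assumptions, not Nova's actual helper or settings.

from oslo_service import loopingcall


def deallocate_with_retries(deallocate, max_attempts=3, interval=2):
    """Call ``deallocate()`` until it succeeds, retrying a few times.

    ``deallocate`` stands in for the per-instance Neutron cleanup; the retry
    budget and interval here are chosen for illustration only.
    """
    attempts = {'n': 0}

    def _try_once():
        attempts['n'] += 1
        try:
            deallocate()
        except Exception:
            if attempts['n'] >= max_attempts:
                # Out of attempts: let the exception stop the looping call
                # and propagate out of .wait().
                raise
            # Not done yet: returning lets the looping call run us again
            # after ``interval`` seconds.
            return
        # Success: stop the looping call and unblock .wait().
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=interval).wait()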
[ 2373.792660] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.793036] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.793332] env[62405]: DEBUG nova.objects.instance [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'resources' on Instance uuid 3e67c74a-1879-4e74-afad-cd7446f284b3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2374.337448] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec488a64-53fe-4147-87b1-784b0bb2a69a {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.345184] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ee3460-bf1e-47d5-ac9d-d364e4db08d6 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.373608] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24849caf-6fcb-48ce-8f11-500694071854 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.380791] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d0c717-0e14-42f3-84e0-fdf689f02f91 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.394231] env[62405]: DEBUG nova.compute.provider_tree [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2374.914048] env[62405]: ERROR nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [req-f4bae324-b021-48a5-844b-ef9b8b4a4dd6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 7d5eded7-a501-4fa6-b1d3-60e273d555d7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f4bae324-b021-48a5-844b-ef9b8b4a4dd6"}]} [ 2374.929510] env[62405]: DEBUG nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing inventories for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 2374.941017] env[62405]: DEBUG nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating ProviderTree inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 2374.941283] env[62405]: DEBUG nova.compute.provider_tree [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2374.951217] env[62405]: DEBUG nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing aggregate associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, aggregates: None {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 2374.968196] env[62405]: DEBUG nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Refreshing trait associations for resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=62405) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 2374.999529] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7c0412-a117-4ae5-ba3c-3c2bf575eb01 {{(pid=62405) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.006762] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791b0b88-1ada-4811-870c-aabade4176b1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.037117] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da506db1-1ab9-41af-8af0-dddfb02cd613 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.043666] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1770c4-4604-4c8b-bf3e-9c7172931298 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2375.055834] env[62405]: DEBUG nova.compute.provider_tree [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2375.587053] env[62405]: DEBUG nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updated inventory for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with generation 205 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 2375.587347] env[62405]: DEBUG nova.compute.provider_tree [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating resource provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 generation from 205 to 206 during operation: update_inventory {{(pid=62405) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2375.587530] env[62405]: DEBUG nova.compute.provider_tree [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Updating inventory in ProviderTree for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2376.091994] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.299s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2376.110500] env[62405]: INFO nova.scheduler.client.report [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocations for instance 3e67c74a-1879-4e74-afad-cd7446f284b3 [ 2376.617712] env[62405]: DEBUG oslo_concurrency.lockutils [None req-c0d128eb-bf9c-4eac-8e3e-ed53ab7b49ed tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "3e67c74a-1879-4e74-afad-cd7446f284b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.268s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.037824] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "46b794f6-e858-45e6-9977-98ab246482f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.038201] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.038330] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "46b794f6-e858-45e6-9977-98ab246482f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.038513] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.038680] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.040704] 
env[62405]: INFO nova.compute.manager [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Terminating instance [ 2380.544813] env[62405]: DEBUG nova.compute.manager [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Start destroying the instance on the hypervisor. {{(pid=62405) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2380.545051] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Destroying instance {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2380.546010] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8359946-9543-45aa-8317-e5765c3c0d23 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.553625] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Powering off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2380.553838] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43728c9c-db89-4d86-9ecf-0258a0e5adc3 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.559606] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2380.559606] env[62405]: value = "task-1948471" [ 2380.559606] env[62405]: _type = "Task" [ 2380.559606] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2380.567504] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.069461] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948471, 'name': PowerOffVM_Task, 'duration_secs': 0.178264} completed successfully. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.069839] env[62405]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Powered off the VM {{(pid=62405) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2381.069959] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Unregistering the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2381.070162] env[62405]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-800211b1-a8fb-4068-9779-93aab74cb9a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.140563] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Unregistered the VM {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2381.140798] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Deleting contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2381.140957] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleting the datastore file [datastore1] 46b794f6-e858-45e6-9977-98ab246482f3 {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2381.141289] env[62405]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b81c3494-7357-434c-b3a3-5981272b3431 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.147705] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for the task: (returnval){ [ 2381.147705] env[62405]: value = "task-1948473" [ 2381.147705] env[62405]: _type = "Task" [ 2381.147705] env[62405]: } to complete. {{(pid=62405) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.154879] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948473, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.657132] env[62405]: DEBUG oslo_vmware.api [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Task: {'id': task-1948473, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138096} completed successfully. {{(pid=62405) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2381.657397] env[62405]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted the datastore file {{(pid=62405) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2381.657568] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Deleted contents of the VM from datastore datastore1 {{(pid=62405) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2381.657748] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Instance destroyed {{(pid=62405) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2381.657925] env[62405]: INFO nova.compute.manager [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2381.658197] env[62405]: DEBUG oslo.service.loopingcall [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62405) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2381.658400] env[62405]: DEBUG nova.compute.manager [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Deallocating network for instance {{(pid=62405) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2381.658494] env[62405]: DEBUG nova.network.neutron [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] deallocate_for_instance() {{(pid=62405) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2381.924193] env[62405]: DEBUG nova.compute.manager [req-68733b6f-e6f5-44a0-a14e-af5200cb1f82 req-f983bfaa-fe45-4a17-bbb9-c5142fa1e23b service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Received event network-vif-deleted-1cb8209b-2a23-499d-b852-91ad4d89784e {{(pid=62405) external_instance_event /opt/stack/nova/nova/compute/manager.py:11649}} [ 2381.924473] env[62405]: INFO nova.compute.manager [req-68733b6f-e6f5-44a0-a14e-af5200cb1f82 req-f983bfaa-fe45-4a17-bbb9-c5142fa1e23b service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Neutron deleted interface 1cb8209b-2a23-499d-b852-91ad4d89784e; detaching it from the instance and deleting it from the info cache [ 2381.924597] env[62405]: DEBUG nova.network.neutron [req-68733b6f-e6f5-44a0-a14e-af5200cb1f82 req-f983bfaa-fe45-4a17-bbb9-c5142fa1e23b service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.405033] env[62405]: DEBUG nova.network.neutron [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Updating instance_info_cache with network_info: [] {{(pid=62405) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.426630] env[62405]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adceca26-96a9-4d66-a369-d20608e5b9ad {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.436326] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1f5f8c-7155-4b9a-bd91-6f3d8555afd1 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.459380] env[62405]: DEBUG nova.compute.manager [req-68733b6f-e6f5-44a0-a14e-af5200cb1f82 req-f983bfaa-fe45-4a17-bbb9-c5142fa1e23b service nova] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Detach interface failed, port_id=1cb8209b-2a23-499d-b852-91ad4d89784e, reason: Instance 46b794f6-e858-45e6-9977-98ab246482f3 could not be found. {{(pid=62405) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11483}} [ 2382.907855] env[62405]: INFO nova.compute.manager [-] [instance: 46b794f6-e858-45e6-9977-98ab246482f3] Took 1.25 seconds to deallocate network for instance. 
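The resource-tracker updates that follow each termination push inventory to Placement guarded by the resource provider generation; in the second termination above the write failed with 409 placement.concurrent_update because the generation had moved, so the report client refreshed inventories, aggregates and traits and retried, moving the generation from 205 to 206. The sketch below shows that conditional-update-and-retry idea against the Placement REST API using requests; the endpoint URL, token and single-retry budget are assumptions for illustration, not Nova's report client.

import requests

PLACEMENT = "http://placement.example/placement"   # assumed endpoint
HEADERS = {"x-auth-token": "ADMIN_TOKEN",           # assumed auth token
           "OpenStack-API-Version": "placement 1.28"}


def set_inventory(rp_uuid, inventories, retries=1):
    """PUT inventories guarded by the provider generation; retry on 409.

    Placement rejects the write with 409 placement.concurrent_update when
    the generation in the payload is stale, as in the log above; re-reading
    the provider supplies the current generation for the retry.
    """
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(retries + 1):
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Generation conflict: loop, refresh the generation, and try again.
    raise RuntimeError(f"inventory update for {rp_uuid} kept conflicting")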
[ 2383.414456] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2383.414771] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2383.414941] env[62405]: DEBUG nova.objects.instance [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lazy-loading 'resources' on Instance uuid 46b794f6-e858-45e6-9977-98ab246482f3 {{(pid=62405) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2383.948264] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a24ee0-e959-43cc-931b-7959c2b6466e {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2383.955956] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27ca358-6a99-4815-bfe9-d52e45658da7 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2383.985195] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db8c4a7-b5ee-4810-9626-7a2e6ec9ce90 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2383.992710] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a97af2-8b7f-4fe9-9741-0350c856d274 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2384.005645] env[62405]: DEBUG nova.compute.provider_tree [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2384.508371] env[62405]: DEBUG nova.scheduler.client.report [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2385.013056] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2385.038338] env[62405]: INFO nova.scheduler.client.report [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Deleted allocations for instance 46b794f6-e858-45e6-9977-98ab246482f3
[ 2385.545933] env[62405]: DEBUG oslo_concurrency.lockutils [None req-e3275f9e-bf5e-4f39-8f2c-6eac5404cc86 tempest-ServerActionsTestOtherA-874534748 tempest-ServerActionsTestOtherA-874534748-project-member] Lock "46b794f6-e858-45e6-9977-98ab246482f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.508s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2391.757074] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2391.757531] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Getting list of instances from cluster (obj){
[ 2391.757531] env[62405]: value = "domain-c8"
[ 2391.757531] env[62405]: _type = "ClusterComputeResource"
[ 2391.757531] env[62405]: } {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 2391.758572] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c0d1a3-8378-4266-a252-15c8cf4fd3a9 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2391.767117] env[62405]: DEBUG nova.virt.vmwareapi.vmops [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Got total of 0 instances {{(pid=62405) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}
[ 2393.411310] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2396.401791] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2397.401622] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2398.401546] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2399.401527] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2400.401439] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2400.401866] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62405) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11065}}
[ 2401.401578] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2401.401959] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Starting heal instance info cache {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10446}}
[ 2402.911363] env[62405]: DEBUG nova.compute.manager [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Didn't find any instances for network info cache update. {{(pid=62405) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10532}}
[ 2403.905276] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2404.401660] env[62405]: DEBUG oslo_service.periodic_task [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Running periodic task ComputeManager.update_available_resource {{(pid=62405) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2404.904725] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2404.904937] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2404.905119] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2404.905281] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62405) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2404.906156] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740c2395-4ddc-4d0c-97ec-e73bacb8e849 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2404.914536] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31de8b41-2138-4f55-83a0-eb4e106d064f {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2404.927732] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0ca17f-37e7-492f-a71f-15b0a51384b8 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2404.933942] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5a397f-bd2e-4049-8dff-04db659e872d {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2404.962504] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180748MB free_disk=172GB free_vcpus=48 pci_devices=None {{(pid=62405) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2404.962675] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2404.962834] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2406.119255] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2406.119528] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62405) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2406.137011] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d839ca-fa0c-4f0e-af80-562cf1c0f6f2 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2406.144539] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee15a49-ae00-4715-ad32-6700cdc6acbe {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2406.173065] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f24eb7-3b80-4e9f-a1e7-e481b5374d53 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2406.179804] env[62405]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb1da55-af97-4e7a-a4c3-d907008c6808 {{(pid=62405) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2406.193323] env[62405]: DEBUG nova.compute.provider_tree [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed in ProviderTree for provider: 7d5eded7-a501-4fa6-b1d3-60e273d555d7 {{(pid=62405) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2406.698067] env[62405]: DEBUG nova.scheduler.client.report [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Inventory has not changed for provider 7d5eded7-a501-4fa6-b1d3-60e273d555d7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 172, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62405) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 2407.201834] env[62405]: DEBUG nova.compute.resource_tracker [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62405) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2407.202271] env[62405]: DEBUG oslo_concurrency.lockutils [None req-134b2d26-2cd2-44fc-937f-f3b89a2d702a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.239s {{(pid=62405) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}